  1. #!/usr/bin/env python
  2. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
  3. #
  4. # Use of this source code is governed by a BSD-style license
  5. # that can be found in the LICENSE file in the root of the source
  6. # tree. An additional intellectual property rights grant can be found
  7. # in the file PATENTS. All contributing project authors may
  8. # be found in the AUTHORS file in the root of the source tree.
  9. # memcheck_analyze.py
  10. ''' Given a valgrind XML file, parses errors and uniques them.'''
  11. import gdb_helper
  12. from collections import defaultdict
  13. import hashlib
  14. import logging
  15. import optparse
  16. import os
  17. import re
  18. import subprocess
  19. import sys
  20. import time
  21. from xml.dom.minidom import parse
  22. from xml.parsers.expat import ExpatError
  23. import common
# Global symbol table (yuck).  Set by MemcheckAnalyzer.Report when gdb-based
# symbolization is enabled; read by gatherFrames and ValgrindError.__str__.
TheAddressTable = None

# These are regexps that define functions (using C++ mangled names)
# we don't want to see in stack traces while pretty printing
# or generating suppressions.
# Just stop printing the stack/suppression frames when the current one
# matches any of these.
_BORING_CALLERS = common.BoringCallers(mangled=True, use_re_wildcards=True)
  32. def getTextOf(top_node, name):
  33. ''' Returns all text in all DOM nodes with a certain |name| that are children
  34. of |top_node|.
  35. '''
  36. text = ""
  37. for nodes_named in top_node.getElementsByTagName(name):
  38. text += "".join([node.data for node in nodes_named.childNodes
  39. if node.nodeType == node.TEXT_NODE])
  40. return text
  41. def getCDATAOf(top_node, name):
  42. ''' Returns all CDATA in all DOM nodes with a certain |name| that are children
  43. of |top_node|.
  44. '''
  45. text = ""
  46. for nodes_named in top_node.getElementsByTagName(name):
  47. text += "".join([node.data for node in nodes_named.childNodes
  48. if node.nodeType == node.CDATA_SECTION_NODE])
  49. if (text == ""):
  50. return None
  51. return text
  52. def shortenFilePath(source_dir, directory):
  53. '''Returns a string with the string prefix |source_dir| removed from
  54. |directory|.'''
  55. prefixes_to_cut = ["build/src/", "valgrind/coregrind/", "out/Release/../../"]
  56. if source_dir:
  57. prefixes_to_cut.append(source_dir)
  58. for p in prefixes_to_cut:
  59. index = directory.rfind(p)
  60. if index != -1:
  61. directory = directory[index + len(p):]
  62. return directory
# Constants that give real names to the abbreviations in valgrind XML output.
# These are used both as XML tag names (when reading frames) and as keys in
# the per-frame dicts built by gatherFrames.
INSTRUCTION_POINTER = "ip"
OBJECT_FILE = "obj"
FUNCTION_NAME = "fn"
SRC_FILE_DIR = "dir"
SRC_FILE_NAME = "file"
SRC_LINE = "line"
  70. def gatherFrames(node, source_dir):
  71. frames = []
  72. for frame in node.getElementsByTagName("frame"):
  73. frame_dict = {
  74. INSTRUCTION_POINTER : getTextOf(frame, INSTRUCTION_POINTER),
  75. OBJECT_FILE : getTextOf(frame, OBJECT_FILE),
  76. FUNCTION_NAME : getTextOf(frame, FUNCTION_NAME),
  77. SRC_FILE_DIR : shortenFilePath(
  78. source_dir, getTextOf(frame, SRC_FILE_DIR)),
  79. SRC_FILE_NAME : getTextOf(frame, SRC_FILE_NAME),
  80. SRC_LINE : getTextOf(frame, SRC_LINE)
  81. }
  82. # Ignore this frame and all the following if it's a "boring" function.
  83. enough_frames = False
  84. for regexp in _BORING_CALLERS:
  85. if re.match("^%s$" % regexp, frame_dict[FUNCTION_NAME]):
  86. enough_frames = True
  87. break
  88. if enough_frames:
  89. break
  90. frames += [frame_dict]
  91. global TheAddressTable
  92. if TheAddressTable != None and frame_dict[SRC_LINE] == "":
  93. # Try using gdb
  94. TheAddressTable.Add(frame_dict[OBJECT_FILE],
  95. frame_dict[INSTRUCTION_POINTER])
  96. return frames
class ValgrindError:
  ''' Takes a <DOM Element: error> node and reads all the data from it. A
  ValgrindError is immutable and is hashed on its pretty printed output.
  '''

  def __init__(self, source_dir, error_node, commandline, testcase):
    ''' Copies all the relevant information out of the DOM and into object
    properties.

    Args:
      error_node: The <error></error> DOM node we're extracting from.
      source_dir: Prefix that should be stripped from the <dir> node.
      commandline: The command that was run under valgrind
      testcase: The test case name, if known.
    '''

    # Valgrind errors contain one <what><stack> pair, plus an optional
    # <auxwhat><stack> pair, plus an optional <origin><what><stack></origin>,
    # plus (since 3.5.0) a <suppression></suppression> pair.
    # (Origin is nicely enclosed; too bad the other two aren't.)
    # The most common way to see all three in one report is
    # a syscall with a parameter that points to uninitialized memory, e.g.
    # Format:
    # <error>
    #   <unique>0x6d</unique>
    #   <tid>1</tid>
    #   <kind>SyscallParam</kind>
    #   <what>Syscall param write(buf) points to uninitialised byte(s)</what>
    #   <stack>
    #     <frame>
    #       ...
    #     </frame>
    #   </stack>
    #   <auxwhat>Address 0x5c9af4f is 7 bytes inside a block of ...</auxwhat>
    #   <stack>
    #     <frame>
    #       ...
    #     </frame>
    #   </stack>
    #   <origin>
    #     <what>Uninitialised value was created by a heap allocation</what>
    #     <stack>
    #       <frame>
    #         ...
    #       </frame>
    #     </stack>
    #   </origin>
    #   <suppression>
    #     <sname>insert_a_suppression_name_here</sname>
    #     <skind>Memcheck:Param</skind>
    #     <skaux>write(buf)</skaux>
    #     <sframe> <fun>__write_nocancel</fun> </sframe>
    #     ...
    #     <sframe> <fun>main</fun> </sframe>
    #     <rawtext>
    # <![CDATA[
    # {
    #    <insert_a_suppression_name_here>
    #    Memcheck:Param
    #    write(buf)
    #    fun:__write_nocancel
    #    ...
    #    fun:main
    # }
    # ]]>
    #     </rawtext>
    #   </suppression>
    # </error>
    #
    # Each frame looks like this:
    #  <frame>
    #    <ip>0x83751BC</ip>
    #    <obj>/data/dkegel/chrome-build/src/out/Release/base_unittests</obj>
    #    <fn>_ZN7testing8internal12TestInfoImpl7RunTestEPNS_8TestInfoE</fn>
    #    <dir>/data/dkegel/chrome-build/src/testing/gtest/src</dir>
    #    <file>gtest-internal-inl.h</file>
    #    <line>655</line>
    #  </frame>
    # although the dir, file, and line elements are missing if there is
    # no debug info.

    self._kind = getTextOf(error_node, "kind")
    self._backtraces = []
    self._suppression = None
    self._commandline = commandline
    self._testcase = testcase
    self._additional = []

    # Iterate through the nodes, parsing <what|auxwhat><stack> pairs.
    # |description| holds the most recent <what>-style text until its
    # matching <stack> (if any) consumes it.
    description = None
    for node in error_node.childNodes:
      if node.localName == "what" or node.localName == "auxwhat":
        description = "".join([n.data for n in node.childNodes
                               if n.nodeType == n.TEXT_NODE])
      elif node.localName == "xwhat":
        description = getTextOf(node, "text")
      elif node.localName == "stack":
        assert description
        self._backtraces.append([description, gatherFrames(node, source_dir)])
        description = None
      elif node.localName == "origin":
        description = getTextOf(node, "what")
        stack = node.getElementsByTagName("stack")[0]
        frames = gatherFrames(stack, source_dir)
        self._backtraces.append([description, frames])
        description = None
        stack = None
        frames = None
      elif description and node.localName != None:
        # The latest description has no stack, e.g. "Address 0x28 is unknown"
        self._additional.append(description)
        description = None

      if node.localName == "suppression":
        self._suppression = getCDATAOf(node, "rawtext");

  def __str__(self):
    ''' Pretty print the type and backtrace(s) of this specific error,
    including suppression (which is just a mangled backtrace).'''
    output = ""
    output += "\n" # Make sure the ### is at the beginning of line.
    output += "### BEGIN MEMORY TOOL REPORT (error hash=#%016X#)\n" % \
        self.ErrorHash()
    if (self._commandline):
      output += self._commandline + "\n"

    output += self._kind + "\n"
    for backtrace in self._backtraces:
      output += backtrace[0] + "\n"
      # Demangle all function names in this backtrace with one c++filt
      # subprocess invocation.
      filter = subprocess.Popen("c++filt -n", stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                shell=True,
                                close_fds=True)
      buf = ""
      for frame in backtrace[1]:
        # Fall back to the raw instruction pointer when no symbol is known.
        buf += (frame[FUNCTION_NAME] or frame[INSTRUCTION_POINTER]) + "\n"
      (stdoutbuf, stderrbuf) = filter.communicate(buf.encode('latin-1'))
      demangled_names = stdoutbuf.split("\n")

      i = 0
      for frame in backtrace[1]:
        output += (" " + demangled_names[i])
        i = i + 1

        global TheAddressTable
        if TheAddressTable != None and frame[SRC_FILE_DIR] == "":
          # Try using gdb
          foo = TheAddressTable.GetFileLine(frame[OBJECT_FILE],
                                            frame[INSTRUCTION_POINTER])
          if foo[0] != None:
            output += (" (" + foo[0] + ":" + foo[1] + ")")
        elif frame[SRC_FILE_DIR] != "":
          output += (" (" + frame[SRC_FILE_DIR] + "/" + frame[SRC_FILE_NAME] +
                     ":" + frame[SRC_LINE] + ")")
        else:
          output += " (" + frame[OBJECT_FILE] + ")"
        output += "\n"

    for additional in self._additional:
      output += additional + "\n"

    assert self._suppression != None, "Your Valgrind doesn't generate " \
        "suppressions - is it too old?"

    if self._testcase:
      output += "The report came from the `%s` test.\n" % self._testcase
    output += "Suppression (error hash=#%016X#):\n" % self.ErrorHash()
    output += (" For more info on using suppressions see "
        "http://dev.chromium.org/developers/tree-sheriffs/sheriff-details-chromium/memory-sheriff#TOC-Suppressing-memory-reports")

    # Widen suppression slightly to make portable between mac and linux
    # TODO(timurrrr): Oops, these transformations should happen
    # BEFORE calculating the hash!
    supp = self._suppression;
    supp = supp.replace("fun:_Znwj", "fun:_Znw*")
    supp = supp.replace("fun:_Znwm", "fun:_Znw*")
    supp = supp.replace("fun:_Znaj", "fun:_Zna*")
    supp = supp.replace("fun:_Znam", "fun:_Zna*")

    # Make suppressions even less platform-dependent.
    for sz in [1, 2, 4, 8]:
      supp = supp.replace("Memcheck:Addr%d" % sz, "Memcheck:Unaddressable")
      supp = supp.replace("Memcheck:Value%d" % sz, "Memcheck:Uninitialized")
    supp = supp.replace("Memcheck:Cond", "Memcheck:Uninitialized")

    # Split into lines so we can enforce length limits
    supplines = supp.split("\n")
    supp = None # to avoid re-use

    # Truncate at line 26 (VG_MAX_SUPP_CALLERS plus 2 for name and type)
    # or at the first 'boring' caller.
    # (https://bugs.kde.org/show_bug.cgi?id=199468 proposes raising
    # VG_MAX_SUPP_CALLERS, but we're probably fine with it as is.)
    newlen = min(26, len(supplines));

    # Drop boring frames and all the following.
    enough_frames = False
    for frameno in range(newlen):
      for boring_caller in _BORING_CALLERS:
        if re.match("^ +fun:%s$" % boring_caller, supplines[frameno]):
          newlen = frameno
          enough_frames = True
          break
      if enough_frames:
        break
    if (len(supplines) > newlen):
      supplines = supplines[0:newlen]
      # Re-close the suppression block that truncation just cut open.
      supplines.append("}")

    for frame in range(len(supplines)):
      # Replace the always-changing anonymous namespace prefix with "*".
      m = re.match("( +fun:)_ZN.*_GLOBAL__N_.*\.cc_" +
                   "[0-9a-fA-F]{8}_[0-9a-fA-F]{8}(.*)",
                   supplines[frame])
      if m:
        supplines[frame] = "*".join(m.groups())

    output += "\n".join(supplines) + "\n"
    output += "### END MEMORY TOOL REPORT (error hash=#%016X#)\n" % \
        self.ErrorHash()

    return output

  def UniqueString(self):
    ''' String to use for object identity. Don't print this, use str(obj)
    instead.'''
    rep = self._kind + " "
    for backtrace in self._backtraces:
      for frame in backtrace[1]:
        rep += frame[FUNCTION_NAME]

        if frame[SRC_FILE_DIR] != "":
          rep += frame[SRC_FILE_DIR] + "/" + frame[SRC_FILE_NAME]
        else:
          rep += frame[OBJECT_FILE]

    return rep

  # This is a device-independent hash identifying the suppression.
  # By printing out this hash we can find duplicate reports between tests and
  # different shards running on multiple buildbots
  # NOTE(review): md5 of a str works on Python 2 only; under Python 3 this
  # would need UniqueString() encoded to bytes first.
  def ErrorHash(self):
    return int(hashlib.md5(self.UniqueString()).hexdigest()[:16], 16)

  def __hash__(self):
    # Hash on the identity string so duplicate errors collapse in sets.
    return hash(self.UniqueString())

  def __eq__(self, rhs):
    # Compares the identity string directly against |rhs| (not
    # rhs.UniqueString()) — presumably deliberate, since Report() stores
    # both ValgrindError objects and plain strings in the same set.
    return self.UniqueString() == rhs
  320. def log_is_finished(f, force_finish):
  321. f.seek(0)
  322. prev_line = ""
  323. while True:
  324. line = f.readline()
  325. if line == "":
  326. if not force_finish:
  327. return False
  328. # Okay, the log is not finished but we can make it up to be parseable:
  329. if prev_line.strip() in ["</error>", "</errorcounts>", "</status>"]:
  330. f.write("</valgrindoutput>\n")
  331. return True
  332. return False
  333. if '</valgrindoutput>' in line:
  334. # Valgrind often has garbage after </valgrindoutput> upon crash.
  335. f.truncate()
  336. return True
  337. prev_line = line
  338. class MemcheckAnalyzer:
  339. ''' Given a set of Valgrind XML files, parse all the errors out of them,
  340. unique them and output the results.'''
  341. SANITY_TEST_SUPPRESSIONS = {
  342. "Memcheck sanity test 01 (memory leak).": 1,
  343. "Memcheck sanity test 02 (malloc/read left).": 1,
  344. "Memcheck sanity test 03 (malloc/read right).": 1,
  345. "Memcheck sanity test 04 (malloc/write left).": 1,
  346. "Memcheck sanity test 05 (malloc/write right).": 1,
  347. "Memcheck sanity test 06 (new/read left).": 1,
  348. "Memcheck sanity test 07 (new/read right).": 1,
  349. "Memcheck sanity test 08 (new/write left).": 1,
  350. "Memcheck sanity test 09 (new/write right).": 1,
  351. "Memcheck sanity test 10 (write after free).": 1,
  352. "Memcheck sanity test 11 (write after delete).": 1,
  353. "Memcheck sanity test 12 (array deleted without []).": 1,
  354. "Memcheck sanity test 13 (single element deleted with []).": 1,
  355. "Memcheck sanity test 14 (malloc/read uninit).": 1,
  356. "Memcheck sanity test 15 (new/read uninit).": 1,
  357. }
  358. # Max time to wait for memcheck logs to complete.
  359. LOG_COMPLETION_TIMEOUT = 180.0
  360. def __init__(self, source_dir, show_all_leaks=False, use_gdb=False):
  361. '''Create a parser for Memcheck logs.
  362. Args:
  363. source_dir: Path to top of source tree for this build
  364. show_all_leaks: Whether to show even less important leaks
  365. use_gdb: Whether to use gdb to resolve source filenames and line numbers
  366. in the report stacktraces
  367. '''
  368. self._source_dir = source_dir
  369. self._show_all_leaks = show_all_leaks
  370. self._use_gdb = use_gdb
  371. # Contains the set of unique errors
  372. self._errors = set()
  373. # Contains the time when the we started analyzing the first log file.
  374. # This variable is used to skip incomplete logs after some timeout.
  375. self._analyze_start_time = None
  376. def Report(self, files, testcase, check_sanity=False):
  377. '''Reads in a set of files and prints Memcheck report.
  378. Args:
  379. files: A list of filenames.
  380. check_sanity: if true, search for SANITY_TEST_SUPPRESSIONS
  381. '''
  382. # Beyond the detailed errors parsed by ValgrindError above,
  383. # the xml file contain records describing suppressions that were used:
  384. # <suppcounts>
  385. # <pair>
  386. # <count>28</count>
  387. # <name>pango_font_leak_todo</name>
  388. # </pair>
  389. # <pair>
  390. # <count>378</count>
  391. # <name>bug_13243</name>
  392. # </pair>
  393. # </suppcounts
  394. # Collect these and print them at the end.
  395. #
  396. # With our patch for https://bugs.kde.org/show_bug.cgi?id=205000 in,
  397. # the file also includes records of the form
  398. # <load_obj><obj>/usr/lib/libgcc_s.1.dylib</obj><ip>0x27000</ip></load_obj>
  399. # giving the filename and load address of each binary that was mapped
  400. # into the process.
  401. global TheAddressTable
  402. if self._use_gdb:
  403. TheAddressTable = gdb_helper.AddressTable()
  404. else:
  405. TheAddressTable = None
  406. cur_report_errors = set()
  407. suppcounts = defaultdict(int)
  408. badfiles = set()
  409. if self._analyze_start_time == None:
  410. self._analyze_start_time = time.time()
  411. start_time = self._analyze_start_time
  412. parse_failed = False
  413. for file in files:
  414. # Wait up to three minutes for valgrind to finish writing all files,
  415. # but after that, just skip incomplete files and warn.
  416. f = open(file, "r+")
  417. pid = re.match(".*\.([0-9]+)$", file)
  418. if pid:
  419. pid = pid.groups()[0]
  420. found = False
  421. running = True
  422. firstrun = True
  423. skip = False
  424. origsize = os.path.getsize(file)
  425. while (running and not found and not skip and
  426. (firstrun or
  427. ((time.time() - start_time) < self.LOG_COMPLETION_TIMEOUT))):
  428. firstrun = False
  429. f.seek(0)
  430. if pid:
  431. # Make sure the process is still running so we don't wait for
  432. # 3 minutes if it was killed. See http://crbug.com/17453
  433. ps_out = subprocess.Popen("ps p %s" % pid, shell=True,
  434. stdout=subprocess.PIPE).stdout
  435. if len(ps_out.readlines()) < 2:
  436. running = False
  437. else:
  438. skip = True
  439. running = False
  440. found = log_is_finished(f, False)
  441. if not running and not found:
  442. logging.warn("Valgrind process PID = %s is not running but its "
  443. "XML log has not been finished correctly.\n"
  444. "Make it up by adding some closing tags manually." % pid)
  445. found = log_is_finished(f, not running)
  446. if running and not found:
  447. time.sleep(1)
  448. f.close()
  449. if not found:
  450. badfiles.add(file)
  451. else:
  452. newsize = os.path.getsize(file)
  453. if origsize > newsize+1:
  454. logging.warn(str(origsize - newsize) +
  455. " bytes of junk were after </valgrindoutput> in %s!" %
  456. file)
  457. try:
  458. parsed_file = parse(file);
  459. except ExpatError, e:
  460. parse_failed = True
  461. logging.warn("could not parse %s: %s" % (file, e))
  462. lineno = e.lineno - 1
  463. context_lines = 5
  464. context_start = max(0, lineno - context_lines)
  465. context_end = lineno + context_lines + 1
  466. context_file = open(file, "r")
  467. for i in range(0, context_start):
  468. context_file.readline()
  469. for i in range(context_start, context_end):
  470. context_data = context_file.readline().rstrip()
  471. if i != lineno:
  472. logging.warn(" %s" % context_data)
  473. else:
  474. logging.warn("> %s" % context_data)
  475. context_file.close()
  476. continue
  477. if TheAddressTable != None:
  478. load_objs = parsed_file.getElementsByTagName("load_obj")
  479. for load_obj in load_objs:
  480. obj = getTextOf(load_obj, "obj")
  481. ip = getTextOf(load_obj, "ip")
  482. TheAddressTable.AddBinaryAt(obj, ip)
  483. commandline = None
  484. preamble = parsed_file.getElementsByTagName("preamble")[0];
  485. for node in preamble.getElementsByTagName("line"):
  486. if node.localName == "line":
  487. for x in node.childNodes:
  488. if x.nodeType == node.TEXT_NODE and "Command" in x.data:
  489. commandline = x.data
  490. break
  491. raw_errors = parsed_file.getElementsByTagName("error")
  492. for raw_error in raw_errors:
  493. # Ignore "possible" leaks for now by default.
  494. if (self._show_all_leaks or
  495. getTextOf(raw_error, "kind") != "Leak_PossiblyLost"):
  496. error = ValgrindError(self._source_dir,
  497. raw_error, commandline, testcase)
  498. if error not in cur_report_errors:
  499. # We haven't seen such errors doing this report yet...
  500. if error in self._errors:
  501. # ... but we saw it in earlier reports, e.g. previous UI test
  502. cur_report_errors.add("This error was already printed in "
  503. "some other test, see 'hash=#%016X#'" % \
  504. error.ErrorHash())
  505. else:
  506. # ... and we haven't seen it in other tests as well
  507. self._errors.add(error)
  508. cur_report_errors.add(error)
  509. suppcountlist = parsed_file.getElementsByTagName("suppcounts")
  510. if len(suppcountlist) > 0:
  511. suppcountlist = suppcountlist[0]
  512. for node in suppcountlist.getElementsByTagName("pair"):
  513. count = getTextOf(node, "count");
  514. name = getTextOf(node, "name");
  515. suppcounts[name] += int(count)
  516. if len(badfiles) > 0:
  517. logging.warn("valgrind didn't finish writing %d files?!" % len(badfiles))
  518. for file in badfiles:
  519. logging.warn("Last 20 lines of %s :" % file)
  520. os.system("tail -n 20 '%s' 1>&2" % file)
  521. if parse_failed:
  522. logging.error("FAIL! Couldn't parse Valgrind output file")
  523. return -2
  524. common.PrintUsedSuppressionsList(suppcounts)
  525. retcode = 0
  526. if cur_report_errors:
  527. logging.error("FAIL! There were %s errors: " % len(cur_report_errors))
  528. if TheAddressTable != None:
  529. TheAddressTable.ResolveAll()
  530. for error in cur_report_errors:
  531. logging.error(error)
  532. retcode = -1
  533. # Report tool's insanity even if there were errors.
  534. if check_sanity:
  535. remaining_sanity_supp = MemcheckAnalyzer.SANITY_TEST_SUPPRESSIONS
  536. for (name, count) in suppcounts.iteritems():
  537. # Workaround for http://crbug.com/334074
  538. if (name in remaining_sanity_supp and
  539. remaining_sanity_supp[name] <= count):
  540. del remaining_sanity_supp[name]
  541. if remaining_sanity_supp:
  542. logging.error("FAIL! Sanity check failed!")
  543. logging.info("The following test errors were not handled: ")
  544. for (name, count) in remaining_sanity_supp.iteritems():
  545. logging.info(" * %dx %s" % (count, name))
  546. retcode = -3
  547. if retcode != 0:
  548. return retcode
  549. logging.info("PASS! No errors found!")
  550. return 0
  551. def _main():
  552. '''For testing only. The MemcheckAnalyzer class should be imported instead.'''
  553. parser = optparse.OptionParser("usage: %prog [options] <files to analyze>")
  554. parser.add_option("", "--source-dir",
  555. help="path to top of source tree for this build"
  556. "(used to normalize source paths in baseline)")
  557. (options, args) = parser.parse_args()
  558. if len(args) == 0:
  559. parser.error("no filename specified")
  560. filenames = args
  561. analyzer = MemcheckAnalyzer(options.source_dir, use_gdb=True)
  562. return analyzer.Report(filenames, None)
  563. if __name__ == "__main__":
  564. sys.exit(_main())