# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Functions to merge multiple JavaScript coverage files into one."""

import logging
import json
import os
import sys

_HERE_PATH = os.path.dirname(__file__)
_THIRD_PARTY_PATH = os.path.normpath(
    os.path.join(_HERE_PATH, '..', '..', '..', 'third_party'))

sys.path.append(os.path.join(_THIRD_PARTY_PATH, 'node'))
sys.path.append(os.path.join(_THIRD_PARTY_PATH, 'js_code_coverage'))

import node
import coverage_modules

logging.basicConfig(format='[%(asctime)s %(levelname)s] %(message)s',
                    level=logging.DEBUG)

def _parse_json_file(path):
  """Opens a file and parses its contents as JSON.

  Args:
    path (str): The path to a JSON file to parse.
  """
  with open(path, 'r') as json_file:
    return json.load(json_file)

def _peek_last(stack):
  """Returns the top element of stack or None"""
  return stack[-1] if stack else None

def _convert_to_disjoint_segments(ranges):
  """Converts a list of v8 CoverageRanges into a list of disjoint segments.

  A v8 CoverageRange is a JSON object that describes the start and end
  character offsets for a block of instrumented JavaScript code:
  https://chromedevtools.github.io/devtools-protocol/tot/Profiler/#type-CoverageRange

  CoverageRange is defined by the ranges field from a v8 FunctionCoverage:
  https://chromedevtools.github.io/devtools-protocol/tot/Profiler/#type-FunctionCoverage

  To compute the list of disjoint segments, we sort (must be a stable sort)
  the |ranges| list in ascending order by their startOffset. This
  has the effect of bringing CoverageRange groups closer together. Each
  group of CoverageRanges has a recursive relationship such that:
    - The first range in the group defines the character offsets for the
      function we are capturing coverage for
    - Children of this range identify unexecuted code unless they are
      also parents, in which case they continue the recursive relationship

  To give an example, consider the following arrow function:

    exports.test = arg => { return arg ? 'y' : 'n' }

  An invocation of test(true) would produce the following |ranges|:

    [
      { "startOffset": 0, "endOffset": 48, "count": 1 },  // Range 1
      { "startOffset": 15, "endOffset": 48, "count": 1 },  // Range 2
      { "startOffset": 41, "endOffset": 46, "count": 0 },  // Range 3
    ]

  Range 1 identifies the entire script.
  Range 2 identifies the function from the arg parameter through
  to the closing brace.
  Range 3 identifies that the code from offset [41, 46) was
  not executed.

  If we were instead to make two calls, e.g. test(true); test(true);
  this would produce the following |ranges|:

    [
      { "startOffset": 0, "endOffset": 48, "count": 1 },  // Range 1
      { "startOffset": 15, "endOffset": 48, "count": 2 },  // Range 2
      { "startOffset": 41, "endOffset": 46, "count": 0 },  // Range 3
    ]

  All the offsets are unchanged, but the count on Range 2 has increased
  while the count on Range 1 has not. This illustrates another implicit
  assumption: the count of the innermost parent range identifies the
  total invocation count.

  TODO(benreich): Write up more extensive documentation.

  Args:
    ranges (list): A list of v8 CoverageRanges that have been
      merged from multiple FunctionCoverages. The order in which they
      appear in the original v8 coverage output must be maintained.

  Returns:
    A list of dictionaries where each entry is defined as:
      {
        count: Number of invocations of this range
        end: Exclusive character offset for the end of this range
      }
  """
  stack = []
  segments = []

  # pylint: disable=unsupported-assignment-operation
  def _append(end, count):
    """Append a new range segment to |segments|.

    If the last segment in |segments| already ends at |end|, return
    early; otherwise, if it has the same invocation count, extend it
    rather than appending a new segment.

    Args:
      end (number): The end character offset for the range
      count (number): The invocation count for the range
    """
    last = _peek_last(segments)
    if last is not None:
      if last['end'] == end:
        return
      if last['count'] == count:
        last['end'] = end
        return
    if end == 0:
      return
    segments.append({'end': end, 'count': count})
    return

  # Stable sort the range segments.
  ranges.sort(key=lambda entry: entry['startOffset'])

  for entry in ranges:
    top = _peek_last(stack)
    while top and top['endOffset'] <= entry['startOffset']:
      _append(top['endOffset'], top['count'])
      stack.pop()
      top = _peek_last(stack)

    top_count = 0 if not top else top['count']
    _append(entry['startOffset'], top_count)
    stack.append(entry)

  while stack:
    top = stack.pop()
    _append(top['endOffset'], top['count'])

  return segments
# pylint: enable=unsupported-assignment-operation
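
# Worked example for _convert_to_disjoint_segments, kept as a comment so the
# module's behavior is unchanged. The ranges mirror the hypothetical
# single-invocation example in the docstring above; the expected result is
# three disjoint segments:
#
#   _convert_to_disjoint_segments([
#       {'startOffset': 0, 'endOffset': 48, 'count': 1},
#       {'startOffset': 15, 'endOffset': 48, 'count': 1},
#       {'startOffset': 41, 'endOffset': 46, 'count': 0},
#   ])
#   # => [{'end': 41, 'count': 1}, {'end': 46, 'count': 0},
#   #     {'end': 48, 'count': 1}]
#
# i.e. offsets [0, 41) ran once, [41, 46) never ran and [46, 48) ran once.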

# pylint: disable=unsupported-assignment-operation
def _merge_segments(segments_a, segments_b):
  """Merges 2 lists of disjoint segments into one.

  Takes two lists that have been output by _convert_to_disjoint_segments
  and merges them into a single list. Overlapping segments have their
  invocation counts summed. If the overlap is only partial, the ranges
  are split into contiguous segments and the invocation counts are
  assigned appropriately.

  Args:
    segments_a (list): A list of disjoint segments.
    segments_b (list): A list of disjoint segments.

  Returns:
    A list of disjoint segments.
  """
  segments = []
  i = 0
  j = 0
  while i < len(segments_a) and j < len(segments_b):
    a = segments_a[i]
    b = segments_b[j]
    count = a.get('count', 0) + b.get('count', 0)
    end = min(a['end'], b['end'])
    last = _peek_last(segments)

    # Get the segment from the top of the stack and
    # extend the segment if the invocation counts match,
    # otherwise push a new range segment onto the stack.
    if last is None or last['count'] != count:
      segments.append({'end': end, 'count': count})
    else:
      last['end'] = end

    if a['end'] <= b['end']:
      i += 1
    if a['end'] >= b['end']:
      j += 1

  while i < len(segments_a):
    segments.append(segments_a[i])
    i += 1

  while j < len(segments_b):
    segments.append(segments_b[j])
    j += 1

  return segments
# pylint: enable=unsupported-assignment-operation
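
# Worked example for _merge_segments, kept as a comment. The two input lists
# are hypothetical and overlap only partially: counts are summed where both
# lists cover an offset, the ranges are split at each differing end offset,
# and segments left over in the longer list are appended unchanged.
#
#   _merge_segments([{'end': 10, 'count': 1}, {'end': 20, 'count': 0}],
#                   [{'end': 15, 'count': 2}])
#   # => [{'end': 10, 'count': 3}, {'end': 15, 'count': 2},
#   #     {'end': 20, 'count': 0}]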

def _get_paths_with_suffix(input_dir, suffix):
  """Recursively gets all files under |input_dir| ending with |suffix|.

  Args:
    input_dir (str): The path to recursively search for files.
    suffix (str): The file name suffix to match, e.g. ".cov.json".

  Returns:
    A list of absolute file paths.
  """
  paths = []
  for dir_path, _sub_dirs, file_names in os.walk(input_dir):
    paths.extend([
        os.path.join(dir_path, fn) for fn in file_names
        if fn.endswith(suffix)
    ])
  return paths

def merge_coverage_files(coverage_dir, output_path):
  """Merge all coverages in the coverage dir into a single file.

  Args:
    coverage_dir (str): Path to all the raw JavaScript coverage files.
    output_path (str): Path to the location to output merged coverage.
  """
  coverage_by_path = {}
  json_files = _get_paths_with_suffix(coverage_dir, '.cov.json')

  if not json_files:
    logging.info('No JavaScript coverage files found in %s', coverage_dir)
    return None

  for file_path in json_files:
    coverage_data = _parse_json_file(file_path)

    if 'result' not in coverage_data:
      raise RuntimeError('%r does not have a result field' % file_path)

    for script_coverage in coverage_data['result']:
      script_url = script_coverage['url']

      # Ignore files with paths that have not been rewritten.
      # Files can rewrite paths by appending a //# sourceURL=
      # comment.
      if not script_url.startswith('//'):
        continue

      previous_coverage = coverage_by_path.get(script_url, [])

      ranges = []
      for function_coverage in script_coverage['functions']:
        for range_coverage in function_coverage['ranges']:
          ranges.append(range_coverage)

      disjoint_segments = _convert_to_disjoint_segments(ranges)
      merged_segments = _merge_segments(previous_coverage,
                                        disjoint_segments)
      coverage_by_path[script_url] = merged_segments

  with open(output_path, 'w') as merged_coverage_file:
    return merged_coverage_file.write(json.dumps(coverage_by_path))
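
# Example usage (hypothetical paths, kept as a comment): merge every
# ".cov.json" file found under a shard's coverage directory into one JSON
# file mapping each rewritten script URL (the "//..." path set via a
# //# sourceURL= comment) to its merged disjoint segments.
#
#   merge_coverage_files('/tmp/task_output/js_coverage',
#                        '/tmp/task_output/merged_js_coverage.json')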

def write_parsed_scripts(task_output_dir):
  """Extract parsed script contents and write back to original folder structure.

  Args:
    task_output_dir (str): The output directory for the sharded task. This
      will contain the raw JavaScript v8 parsed files that are identified by
      their ".js.json" suffix.

  Returns:
    The absolute file path to the raw parsed scripts or None if no parsed
    scripts were identified (or any of the raw data contains invalid JSON).
  """
  scripts = _get_paths_with_suffix(task_output_dir, '.js.json')
  output_dir = os.path.join(task_output_dir, 'parsed_scripts')

  if not scripts:
    return None

  for file_path in scripts:
    # TODO(crbug.com/1224786): Some of the raw script data is being saved
    # with a trailing curly brace leading to invalid JSON. Bail out if this
    # is encountered and ensure we log the file path.
    script_data = None
    try:
      script_data = _parse_json_file(file_path)
    except ValueError as e:
      logging.error('Failed to parse %s: %s', file_path, e)
      return None

    if any(key not in script_data for key in ('url', 'text')):
      logging.info('File %s is missing key url or text', file_path)
      continue

    if not script_data['url'].startswith('//'):
      continue

    source_path = os.path.normpath(script_data['url'].replace('//', ''))
    source_directory = os.path.join(output_dir, os.path.dirname(source_path))
    if not os.path.exists(source_directory):
      os.makedirs(source_directory)

    with open(os.path.join(output_dir, source_path), 'wb') as f:
      f.write(script_data['text'].encode('utf8'))

  return output_dir
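
# Example (hypothetical input, kept as a comment): a parsed script file
# "foo.js.json" whose "url" is "//chrome/browser/resources/foo.js" would be
# written to <task_output_dir>/parsed_scripts/chrome/browser/resources/foo.js
# with its "text" field as the file contents.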

def get_raw_coverage_dirs(task_output_dir):
  """Returns a set of directories containing raw v8 coverage.

  Args:
    task_output_dir (str): The output directory for the sharded task. This
      will contain the raw JavaScript v8 coverage files that are identified
      by their ".cov.json" suffix.
  """
  coverage_directories = set()
  for dir_path, _sub_dirs, file_names in os.walk(task_output_dir):
    for name in file_names:
      if name.endswith('.cov.json'):
        coverage_directories.add(dir_path)
        continue

  return coverage_directories

def convert_raw_coverage_to_istanbul(
    raw_coverage_dirs, source_dir, task_output_dir):
  """Calls the node helper script convert_to_istanbul.js

  Args:
    raw_coverage_dirs (list): Directories that contain raw v8 code coverage.
    source_dir (str): Root directory containing the instrumented source.
    task_output_dir (str): Directory the converted istanbul coverage is
      written to (passed as --output-dir).

  Raises:
    RuntimeError: If the underlying node command fails.
  """
  return node.RunNode([
      os.path.join(_HERE_PATH, 'convert_to_istanbul.js'),
      '--source-dir', source_dir,
      '--output-dir', task_output_dir,
      '--raw-coverage-dirs', ' '.join(raw_coverage_dirs),
  ])

def merge_istanbul_reports(istanbul_coverage_dir, source_dir, output_file):
  """Merges all disparate istanbul reports into a single report.

  Args:
    istanbul_coverage_dir (str): Directory containing separate coverage files.
    source_dir (str): Directory containing instrumented source code.
    output_file (str): File path to output merged coverage.

  Raises:
    RuntimeError: If the underlying node command fails.
  """
  return node.RunNode([
      coverage_modules.PathToNyc(),
      'merge', istanbul_coverage_dir,
      output_file,
      '--cwd', source_dir,
  ])
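
# End-to-end sketch (hypothetical paths and wiring, kept as a comment): the
# helpers above are intended to be chained roughly as follows. Where the
# istanbul formatted output actually lands depends on convert_to_istanbul.js,
# so the directory handed to merge_istanbul_reports below is an assumption.
#
#   task_output_dir = '/tmp/task_output'
#   source_dir = write_parsed_scripts(task_output_dir)
#   raw_dirs = get_raw_coverage_dirs(task_output_dir)
#   convert_raw_coverage_to_istanbul(raw_dirs, source_dir, task_output_dir)
#   merge_istanbul_reports(task_output_dir, source_dir,
#                          os.path.join(task_output_dir, 'coverage.json'))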