standard_gtest_merge.py

#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import print_function

import json
import os
import sys

import merge_api

MISSING_SHARDS_MSG = r"""Missing results from the following shard(s): %s

This can happen in the following cases:
  * Test failed to start (missing *.dll/*.so dependency for example)
  * Test crashed or hung
  * Task expired because there are not enough bots available and are all used
  * Swarming service experienced problems

Please examine logs to figure out what happened.
"""


def emit_warning(title, log=None):
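  """Prints warning annotations for the current step.

  The @@@...@@@ lines are buildbot-style annotator directives: the step is
  marked as a warning and, if `log` is given, its lines are attached as a
  step log named after `title`.
  """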
  print('@@@STEP_WARNINGS@@@')
  print(title)
  if log:
    title = title.rstrip()
    for line in log.splitlines():
      print('@@@STEP_LOG_LINE@%s@%s@@@' % (title, line.rstrip()))
    print('@@@STEP_LOG_END@%s@@@' % title)


def merge_shard_results(summary_json, jsons_to_merge):
  """Reads JSON test output from all shards and combines them into one.

  Returns dict with merged test output on success or None on failure. Emits
  annotations.
  """
  # summary.json is produced by the swarming client itself. We are mostly
  # interested in the number of shards.
  try:
    with open(summary_json) as f:
      summary = json.load(f)
  except (IOError, ValueError):
    emit_warning(
        'summary.json is missing or can not be read',
        'Something is seriously wrong with swarming client or the bot.')
    return None

  # Merge all JSON files together. Keep track of missing shards.
  merged = {
      'all_tests': set(),
      'disabled_tests': set(),
      'global_tags': set(),
      'missing_shards': [],
      'per_iteration_data': [],
      'swarming_summary': summary,
      'test_locations': {},
  }
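  # The set-valued fields accumulate unique entries across shards; they are
  # converted to sorted lists before the merged dict is returned.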
  for index, result in enumerate(summary['shards']):
    if result is None:
      merged['missing_shards'].append(index)
      continue

    # Author note: this code path doesn't trigger convert_to_old_format() in
    # client/swarming.py, which means the state enum is saved in its string
    # name form, not in the number form.
    state = result.get('state')
    if state == u'BOT_DIED':
      emit_warning('Shard #%d had a Swarming internal failure' % index)
    elif state == u'EXPIRED':
      emit_warning('There wasn\'t enough capacity to run your test')
    elif state == u'TIMED_OUT':
      emit_warning(
          'Test runtime exceeded allocated time',
          'Either it ran for too long (hard timeout) or it didn\'t produce '
          'I/O for an extended period of time (I/O timeout)')
    elif state != u'COMPLETED':
      emit_warning('Invalid Swarming task state: %s' % state)

    json_data, err_msg = load_shard_json(index, result.get('task_id'),
                                         jsons_to_merge)
    if json_data:
      # Set-like fields.
      for key in ('all_tests', 'disabled_tests', 'global_tags'):
        merged[key].update(json_data.get(key, []))

      # Dict-like fields.
      for key in ('test_locations',):
        merged[key].update(json_data.get(key, {}))

      # 'per_iteration_data' is a list of dicts. Dicts should be merged
      # together, not the 'per_iteration_data' list itself.
      merged['per_iteration_data'] = merge_list_of_dicts(
          merged['per_iteration_data'],
          json_data.get('per_iteration_data', []))
    else:
      merged['missing_shards'].append(index)
      emit_warning('No result was found: %s' % err_msg)

  # If some shards are missing, make it known. Continue parsing anyway. The
  # step should be red anyway, since swarming.py returns a non-zero exit code
  # in that case.
  if merged['missing_shards']:
    as_str = ', '.join(map(str, merged['missing_shards']))
    emit_warning(
        'some shards did not complete: %s' % as_str,
        MISSING_SHARDS_MSG % as_str)
    # Not all tests ran, so the combined JSON summary can not be trusted.
    merged['global_tags'].add('UNRELIABLE_RESULTS')

  # Convert to a jsonish dict.
  for key in ('all_tests', 'disabled_tests', 'global_tags'):
    merged[key] = sorted(merged[key])

  return merged
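

# Shard output.json files larger than this are treated like a missing shard
# rather than being loaded into memory.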
OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MB


def load_shard_json(index, task_id, jsons_to_merge):
  """Reads the JSON output of the specified shard.

  Args:
    index: The index of the shard to load data for; used with the old API,
        where the output directory is named after the shard index.
    task_id: The swarming task id of the shard to load data for; used with
        the new API, where the output directory is named after the task id.
    jsons_to_merge: The list of candidate JSON output files to search.

  Returns: A tuple containing:
    * The contents of the shard's output.json, deserialized into a python
      object.
    * An error string.
    (exactly one of the tuple elements will be non-None).
  """
  # 'output.json' is set in swarming/api.py, gtest_task method.
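  # A shard's output directory is named either after the shard index (old
  # API) or after the swarming task id (new API).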
  matching_json_files = [
      j for j in jsons_to_merge
      if (os.path.basename(j) == 'output.json' and
          (os.path.basename(os.path.dirname(j)) == str(index) or
           os.path.basename(os.path.dirname(j)) == task_id))]

  if not matching_json_files:
    print('shard %s test output missing' % index, file=sys.stderr)
    return (None, 'shard %s test output was missing' % index)

  if len(matching_json_files) > 1:
    print('duplicate test output for shard %s' % index, file=sys.stderr)
    return (None, 'shard %s test output was duplicated' % index)

  path = matching_json_files[0]

  try:
    filesize = os.stat(path).st_size
    if filesize > OUTPUT_JSON_SIZE_LIMIT:
      print('output.json is %d bytes. Max size is %d' % (
          filesize, OUTPUT_JSON_SIZE_LIMIT), file=sys.stderr)
      return (None, 'shard %s test output exceeded the size limit' % index)

    with open(path) as f:
      return (json.load(f), None)
  except (IOError, ValueError, OSError) as e:
    print('Missing or invalid gtest JSON file: %s' % path, file=sys.stderr)
    print('%s: %s' % (type(e).__name__, e), file=sys.stderr)
    return (None, 'shard %s test output was missing or invalid' % index)


def merge_list_of_dicts(left, right):
  """Merges dicts left[0] with right[0], left[1] with right[1], etc."""
  output = []
  for i in range(max(len(left), len(right))):
    left_dict = left[i] if i < len(left) else {}
    right_dict = right[i] if i < len(right) else {}
    merged_dict = left_dict.copy()
    merged_dict.update(right_dict)
    output.append(merged_dict)
  return output


def standard_gtest_merge(
    output_json, summary_json, jsons_to_merge):
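  """Merges the per-shard gtest results into a single file at output_json.

  Always returns 0; missing or broken shards are reported through step
  annotations rather than the exit code.
  """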
  output = merge_shard_results(summary_json, jsons_to_merge)
  with open(output_json, 'w') as f:
    json.dump(output, f)

  return 0


def main(raw_args):
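  """Parses the standard merge-script arguments and runs the gtest merge.

  merge_api.ArgumentParser is expected to supply output_json, summary_json
  and jsons_to_merge, matching the attributes used below.
  """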
  parser = merge_api.ArgumentParser()
  args = parser.parse_args(raw_args)

  return standard_gtest_merge(
      args.output_json, args.summary_json, args.jsons_to_merge)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))