standard_isolated_script_merge.py

#!/usr/bin/env python3
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import print_function

import json
import os
import sys

import six

import merge_api
import results_merger

def StandardIsolatedScriptMerge(output_json, summary_json, jsons_to_merge):
  """Merge the contents of one or more results JSONs into a single JSON.

  Args:
    output_json: A path to a JSON file to which the merged results should be
      written.
    summary_json: A path to the summary.json produced by the swarming client.
    jsons_to_merge: A list of paths to JSON files that should be merged.
  """
  # summary.json is produced by the swarming client itself. We are mostly
  # interested in the number of shards.
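  # A minimal summary.json, as this function consumes it, might look like
  # (shape inferred from the loop below; the real file has more fields):
  #   {"shards": [{"task_id": "430ab7..."}, null]}
  # where a null (or otherwise empty) entry means that shard never reported
  # a result.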
  try:
    with open(summary_json) as f:
      summary = json.load(f)
  except (IOError, ValueError):
    print('summary.json is missing or cannot be read.',
          'Something is seriously wrong with the swarming client or the bot.',
          file=sys.stderr)
    return 1
  missing_shards = []
  shard_results_list = []
  for index, result in enumerate(summary['shards']):
    output_path = None
    if result:
      output_path = find_shard_output_path(index, result.get('task_id'),
                                           jsons_to_merge)
    if not output_path:
      missing_shards.append(index)
      continue

    with open(output_path) as f:
      try:
        json_contents = json.load(f)
      except ValueError as e:
        six.raise_from(
            ValueError('Failed to parse JSON from %s' % output_path), e)
      shard_results_list.append(json_contents)
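  # Illustrative annotation (values hypothetical): if shard 1 produced no
  # output.json, the merged JSON written below gains
  #   "missing_shards": [1]
  # and 'UNRELIABLE_RESULTS' is appended to its "global_tags" list, so
  # downstream consumers can tell the results are incomplete.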
  merged_results = results_merger.merge_test_results(shard_results_list)
  if missing_shards:
    merged_results['missing_shards'] = missing_shards
    if 'global_tags' not in merged_results:
      merged_results['global_tags'] = []
    merged_results['global_tags'].append('UNRELIABLE_RESULTS')

  with open(output_json, 'w') as f:
    json.dump(merged_results, f)

  return 0

def find_shard_output_path(index, task_id, jsons_to_merge):
  """Finds the output file for the shard matching the index or task id.

  Args:
    index: The index of the shard to load data for; used by the old API,
      which names each shard's output directory after its index.
    task_id: The task id of the shard to load data for; used by the new API,
      which names each shard's output directory after its task id.
    jsons_to_merge: A container of file paths for shards that emitted output.

  Returns:
    The matching path, or None if it is missing or ambiguous.
  """
  # 'output.json' is set in swarming/api.py, gtest_task method.
  matching_json_files = [
      j for j in jsons_to_merge
      if (os.path.basename(j) == 'output.json' and
          (os.path.basename(os.path.dirname(j)) == str(index) or
           os.path.basename(os.path.dirname(j)) == task_id))
  ]

  if not matching_json_files:
    print('shard %s test output missing' % index, file=sys.stderr)
    return None
  if len(matching_json_files) > 1:
    print('duplicate test output for shard %s' % index, file=sys.stderr)
    return None

  return matching_json_files[0]
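
# For illustration (paths hypothetical), find_shard_output_path accepts
# either directory layout:
#   <task_output_dir>/0/output.json            (old API: shard index as dir)
#   <task_output_dir>/430ab7.../output.json    (new API: task id as dir)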

def main(raw_args):
  parser = merge_api.ArgumentParser()
  args = parser.parse_args(raw_args)
  return StandardIsolatedScriptMerge(
      args.output_json, args.summary_json, args.jsons_to_merge)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
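
# Example invocation (a sketch; the flag spellings are inferred from the
# attribute names used in main() and may differ in merge_api.ArgumentParser):
#   python3 standard_isolated_script_merge.py \
#       --output-json merged.json \
#       --summary-json /path/to/summary.json \
#       0/output.json 1/output.json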