{
  "args": [
    "./disable",
    "All/TabCapturePerformanceTest.Performance/0"
  ],
  6. "requests": "{\"GetTestResultHistory/{\\\"realm\\\": \\\"chromium:ci\\\", \\\"testIdRegexp\\\": \\\"ninja://.*/TabCapturePerformanceTest.Performance(/.*)?\\\", \\\"pageSize\\\": 1}\": \"{\\\"entries\\\":[{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:21:13.577974Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b71a15bc953911/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.0/results/c4c4aa3b-00128\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.0\\\",\\\"resultId\\\":\\\"c4c4aa3b-00128\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"Mac Release (Intel)\\\",\\\"gpu\\\":\\\"8086\\\",\\\"os\\\":\\\"Mac-11.5.2\\\",\\\"test_suite\\\":\\\"tab_capture_end2end_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"SKIP\\\",\\\"variantHash\\\":\\\"794f8064c6f38e86\\\"}}],\\\"nextPageToken\\\":\\\"CgJ0cwoYMDg4OThhY2Q4ZjA2MTBmMGRkY2M5MzAyCgEx\\\"}\\n\", \"GetTestResultHistory/{\\\"realm\\\": \\\"chromium:ci\\\", \\\"testIdRegexp\\\": \\\"ninja://.*/TabCapturePerformanceTest.Performance(/.*)?\\\", \\\"pageSize\\\": 10}\": \"{\\\"entries\\\":[{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:21:13.577974Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b71a15bc953911/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.0/results/c4c4aa3b-00128\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.0\\\",\\\"resultId\\\":\\\"c4c4aa3b-00128\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"Mac Release (Intel)\\\",\\\"gpu\\\":\\\"8086\\\",\\\"os\\\":\\\"Mac-11.5.2\\\",\\\"test_suite\\\":\\\"tab_capture_end2end_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"SKIP\\\",\\\"variantHash\\\":\\\"794f8064c6f38e86\\\"}},{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:21:13.577974Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b71a15bc953911/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.1/results/c4c4aa3b-00129\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.1\\\",\\\"resultId\\\":\\\"c4c4aa3b-00129\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"Mac Release (Intel)\\\",\\\"gpu\\\":\\\"8086\\\",\\\"os\\\":\\\"Mac-11.5.2\\\",\\\"test_suite\\\":\\\"tab_capture_end2end_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"SKIP\\\",\\\"variantHash\\\":\\\"794f8064c6f38e86\\\"}},{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:21:13.577974Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b71a15bc953911/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.2/results/c4c4aa3b-00130\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.2\\\",\\\"resultId\\\":\\\"c4c4aa3b-00130\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"Mac Release 
(Intel)\\\",\\\"gpu\\\":\\\"8086\\\",\\\"os\\\":\\\"Mac-11.5.2\\\",\\\"test_suite\\\":\\\"tab_capture_end2end_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"SKIP\\\",\\\"variantHash\\\":\\\"794f8064c6f38e86\\\"}},{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:21:13.577974Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b71a15bc953911/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.3/results/c4c4aa3b-00131\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.3\\\",\\\"resultId\\\":\\\"c4c4aa3b-00131\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"Mac Release (Intel)\\\",\\\"gpu\\\":\\\"8086\\\",\\\"os\\\":\\\"Mac-11.5.2\\\",\\\"test_suite\\\":\\\"tab_capture_end2end_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"SKIP\\\",\\\"variantHash\\\":\\\"794f8064c6f38e86\\\"}},{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:20:01.487628Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b71aeee74fb111/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.0/results/0b1f43d0-01200\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.0\\\",\\\"resultId\\\":\\\"0b1f43d0-01200\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"linux-bfcache-rel\\\",\\\"os\\\":\\\"Ubuntu-18.04\\\",\\\"test_suite\\\":\\\"bf_cache_browser_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"PASS\\\",\\\"duration\\\":\\\"5.248s\\\",\\\"variantHash\\\":\\\"37fa67a8acf15536\\\"}},{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:20:01.487628Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b71af271510411/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.1/results/4ac7a082-01255\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.1\\\",\\\"resultId\\\":\\\"4ac7a082-01255\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"linux-bfcache-rel\\\",\\\"os\\\":\\\"Ubuntu-18.04\\\",\\\"test_suite\\\":\\\"bf_cache_browser_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"PASS\\\",\\\"duration\\\":\\\"6.296s\\\",\\\"variantHash\\\":\\\"37fa67a8acf15536\\\"}},{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:20:01.487628Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b71af5a76aea11/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.3/results/43da9225-01252\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.3\\\",\\\"resultId\\\":\\\"43da9225-01252\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"linux-bfcache-rel\\\",\\\"os\\\":\\\"Ubuntu-18.04\\\",\\\"test_suite\\\":\\\"bf_cache_browser_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"PASS\\\",\\\"duration\\\":\\\"6.031s\\\",\\\"variantHash\\\":\\\"37fa67a8acf15536\\\"}},{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:17:16.536058Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b7176691687c11/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.0/results/f3f495c9-01338\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.0\\\",\\\"resultId\\\":\\\"f3f495c9-01338\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"Win10 Tests 
x64\\\",\\\"os\\\":\\\"Windows-10-19042\\\",\\\"test_suite\\\":\\\"browser_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"PASS\\\",\\\"duration\\\":\\\"6.285s\\\",\\\"variantHash\\\":\\\"1c8234ad9f9159c5\\\"}},{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:17:16.536058Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b7175e6d20a611/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.1/results/1cd61c6a-01346\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.1\\\",\\\"resultId\\\":\\\"1cd61c6a-01346\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"Win10 Tests x64\\\",\\\"os\\\":\\\"Windows-10-19042\\\",\\\"test_suite\\\":\\\"browser_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"PASS\\\",\\\"duration\\\":\\\"24.702s\\\",\\\"variantHash\\\":\\\"1c8234ad9f9159c5\\\"}},{\\\"invocationTimestamp\\\":\\\"2022-01-28T01:17:16.536058Z\\\",\\\"result\\\":{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b71770d31f0f11/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.2/results/1cc8c530-01358\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.2\\\",\\\"resultId\\\":\\\"1cc8c530-01358\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"Win10 Tests x64\\\",\\\"os\\\":\\\"Windows-10-19042\\\",\\\"test_suite\\\":\\\"browser_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"PASS\\\",\\\"duration\\\":\\\"8.347s\\\",\\\"variantHash\\\":\\\"1c8234ad9f9159c5\\\"}}],\\\"nextPageToken\\\":\\\"CgJ0cwoYMDg5Yzg4Y2Q4ZjA2MTA5MGIxY2VmZjAxCgEz\\\"}\\n\", \"GetTestResult/{\\\"name\\\": \\\"invocations/task-chromium-swarm.appspot.com-58b71aeee74fb111/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.0/results/0b1f43d0-01200\\\"}\": \"{\\\"name\\\":\\\"invocations/task-chromium-swarm.appspot.com-58b71aeee74fb111/tests/ninja:%2F%2Fchrome%2Ftest:browser_tests%2FTabCapturePerformanceTest.Performance%2FAll.0/results/0b1f43d0-01200\\\",\\\"testId\\\":\\\"ninja://chrome/test:browser_tests/TabCapturePerformanceTest.Performance/All.0\\\",\\\"resultId\\\":\\\"0b1f43d0-01200\\\",\\\"variant\\\":{\\\"def\\\":{\\\"builder\\\":\\\"linux-bfcache-rel\\\",\\\"os\\\":\\\"Ubuntu-18.04\\\",\\\"test_suite\\\":\\\"bf_cache_browser_tests\\\"}},\\\"expected\\\":true,\\\"status\\\":\\\"PASS\\\",\\\"summaryHtml\\\":\\\"\\\\u003cp\\\\u003e\\\\u003ctext-artifact artifact-id=\\\\\\\"snippet\\\\\\\" /\\\\u003e\\\\u003c/p\\\\u003e\\\",\\\"duration\\\":\\\"5.248s\\\",\\\"tags\\\":[{\\\"key\\\":\\\"gtest_global_tag\\\",\\\"value\\\":\\\"CPU_64_BITS\\\"},{\\\"key\\\":\\\"gtest_global_tag\\\",\\\"value\\\":\\\"MODE_RELEASE\\\"},{\\\"key\\\":\\\"gtest_global_tag\\\",\\\"value\\\":\\\"OS_LINUX\\\"},{\\\"key\\\":\\\"gtest_global_tag\\\",\\\"value\\\":\\\"OS_POSIX\\\"},{\\\"key\\\":\\\"gtest_status\\\",\\\"value\\\":\\\"SUCCESS\\\"},{\\\"key\\\":\\\"lossless_snippet\\\",\\\"value\\\":\\\"true\\\"},{\\\"key\\\":\\\"monorail_component\\\",\\\"value\\\":\\\"Internals\\\\u003eMedia\\\\u003eSurfaceCapture\\\"},{\\\"key\\\":\\\"orig_format\\\",\\\"value\\\":\\\"chromium_gtest\\\"},{\\\"key\\\":\\\"step_name\\\",\\\"value\\\":\\\"bf_cache_browser_tests on 
Ubuntu-18.04\\\"},{\\\"key\\\":\\\"team_email\\\",\\\"value\\\":\\\"media-capture-dev@chromium.org\\\"},{\\\"key\\\":\\\"test_name\\\",\\\"value\\\":\\\"All/TabCapturePerformanceTest.Performance/0\\\"}],\\\"variantHash\\\":\\\"37fa67a8acf15536\\\",\\\"testMetadata\\\":{\\\"name\\\":\\\"All/TabCapturePerformanceTest.Performance/0\\\",\\\"location\\\":{\\\"repo\\\":\\\"https://chromium.googlesource.com/chromium/src\\\",\\\"fileName\\\":\\\"//chrome/browser/extensions/api/tab_capture/tab_capture_performancetest.cc\\\",\\\"line\\\":294}}}\\n\"}",
  7. "read_data": {
  8. "chrome/browser/extensions/api/tab_capture/tab_capture_performancetest.cc": "// Copyright 2013 The Chromium Authors. All rights reserved.\n// Use of this source code is governed by a BSD-style license that can be\n// found in the LICENSE file.\n\n#include <cmath>\n#include <unordered_map>\n\n#include \"base/command_line.h\"\n#include \"base/files/file_util.h\"\n#include \"base/strings/stringprintf.h\"\n#include \"base/test/trace_event_analyzer.h\"\n#include \"build/build_config.h\"\n#include \"build/chromeos_buildflags.h\"\n#include \"chrome/browser/extensions/api/tab_capture/tab_capture_performance_test_base.h\"\n#include \"chrome/browser/extensions/extension_service.h\"\n#include \"chrome/browser/profiles/profile.h\"\n#include \"chrome/browser/ui/exclusive_access/fullscreen_controller.h\"\n#include \"chrome/common/chrome_switches.h\"\n#include \"chrome/test/base/in_process_browser_test.h\"\n#include \"chrome/test/base/test_launcher_utils.h\"\n#include \"chrome/test/base/test_switches.h\"\n#include \"chrome/test/base/tracing.h\"\n#include \"content/public/common/content_switches.h\"\n#include \"content/public/test/browser_test.h\"\n#include \"extensions/common/switches.h\"\n#include \"extensions/test/extension_test_message_listener.h\"\n#include \"testing/gtest/include/gtest/gtest.h\"\n#include \"testing/perf/perf_result_reporter.h\"\n#include \"ui/compositor/compositor_switches.h\"\n#include \"ui/gl/gl_switches.h\"\n\nnamespace {\n\n// Number of events to trim from the beginning and end. These events don't\n// contribute anything toward stable measurements: A brief moment of startup\n// \"jank\" is acceptable, and shutdown may result in missing events (since\n// render widget draws may stop before capture stops).\nconstexpr int kTrimEvents = 24; // 1 sec at 24fps, or 0.4 sec at 60 fps.\n\n// Minimum number of events required for a reasonable analysis.\nconstexpr int kMinDataPointsForFullRun = 100; // ~5 sec at 24fps.\n\n// Minimum number of events required for data analysis in a non-performance run.\nconstexpr int kMinDataPointsForQuickRun = 3;\n\nconstexpr char kMetricPrefixTabCapture[] = \"TabCapture.\";\nconstexpr char kMetricCaptureMs[] = \"capture\";\nconstexpr char kMetricCaptureFailRatePercent[] = \"capture_fail_rate\";\nconstexpr char kMetricCaptureLatencyMs[] = \"capture_latency\";\nconstexpr char kMetricRendererFrameDrawMs[] = \"renderer_frame_draw\";\n\nconstexpr char kEventCapture[] = \"Capture\";\nconstexpr char kEventSuffixFailRate[] = \"FailRate\";\nconstexpr char kEventSuffixLatency[] = \"Latency\";\nconstexpr char kEventCommitAndDrawCompositorFrame[] =\n \"WidgetBase::DidCommitAndDrawCompositorFrame\";\nconst std::unordered_map<std::string, std::string> kEventToMetricMap(\n {{kEventCapture, kMetricCaptureMs},\n {std::string(kEventCapture) + kEventSuffixFailRate,\n kMetricCaptureFailRatePercent},\n {std::string(kEventCapture) + kEventSuffixLatency,\n kMetricCaptureLatencyMs},\n {kEventCommitAndDrawCompositorFrame, kMetricRendererFrameDrawMs}});\n\nperf_test::PerfResultReporter SetUpTabCaptureReporter(\n const std::string& story) {\n perf_test::PerfResultReporter reporter(kMetricPrefixTabCapture, story);\n reporter.RegisterImportantMetric(kMetricCaptureMs, \"ms\");\n reporter.RegisterImportantMetric(kMetricCaptureFailRatePercent, \"percent\");\n reporter.RegisterImportantMetric(kMetricCaptureLatencyMs, \"ms\");\n reporter.RegisterImportantMetric(kMetricRendererFrameDrawMs, \"ms\");\n return reporter;\n}\n\nstd::string GetMetricFromEventName(const std::string& 
event_name) {\n auto iter = kEventToMetricMap.find(event_name);\n return iter == kEventToMetricMap.end() ? event_name : iter->second;\n}\n\n// A convenience macro to run a gtest expectation in the \"full performance run\"\n// setting, or else a warning that something is not being entirely tested in the\n// \"CQ run\" setting. This is required because the test runs in the CQ may not be\n// long enough to collect sufficient tracing data; and, unfortunately, there's\n// nothing we can do about that.\n#define EXPECT_FOR_PERFORMANCE_RUN(expr) \\\n do { \\\n if (is_full_performance_run()) { \\\n EXPECT_TRUE(expr); \\\n } else if (!(expr)) { \\\n LOG(WARNING) << \"Allowing failure: \" << #expr; \\\n } \\\n } while (false)\n\nenum TestFlags {\n kUseGpu = 1 << 0, // Only execute test if --enable-gpu was given\n // on the command line. This is required for\n // tests that run on GPU.\n kTestThroughWebRTC = 1 << 3, // Send video through a webrtc loopback.\n kSmallWindow = 1 << 4, // Window size: 1 = 800x600, 0 = 2000x1000\n};\n\n// Perfetto trace events should have a \"success\" that is either on\n// the beginning or end event.\nbool EventWasSuccessful(const trace_analyzer::TraceEvent* event) {\n double result;\n // First case: the begin event had a success.\n if (event->GetArgAsNumber(\"success\", &result) && result > 0.0) {\n return true;\n }\n\n // Second case: the end event had a success.\n if (event->other_event &&\n event->other_event->GetArgAsNumber(\"success\", &result) && result > 0.0) {\n return true;\n }\n\n return false;\n}\nclass TabCapturePerformanceTest : public TabCapturePerformanceTestBase,\n public testing::WithParamInterface<int> {\n public:\n TabCapturePerformanceTest() = default;\n ~TabCapturePerformanceTest() override = default;\n\n bool HasFlag(TestFlags flag) const {\n return (GetParam() & flag) == flag;\n }\n\n std::string GetSuffixForTestFlags() const {\n std::string suffix;\n if (HasFlag(kUseGpu))\n suffix += \"_comp_gpu\";\n if (HasFlag(kTestThroughWebRTC))\n suffix += \"_webrtc\";\n if (HasFlag(kSmallWindow))\n suffix += \"_small\";\n // Make sure we always have a story.\n if (suffix.size() == 0) {\n suffix = \"_baseline_story\";\n }\n // Strip off the leading _.\n suffix.erase(0, 1);\n return suffix;\n }\n\n void SetUp() override {\n const base::FilePath test_file = GetApiTestDataDir()\n .AppendASCII(\"tab_capture\")\n .AppendASCII(\"balls.html\");\n const bool success = base::ReadFileToString(test_file, &test_page_html_);\n CHECK(success) << \"Failed to load test page at: \"\n << test_file.AsUTF8Unsafe();\n\n if (!HasFlag(kUseGpu))\n UseSoftwareCompositing();\n\n TabCapturePerformanceTestBase::SetUp();\n }\n\n void SetUpCommandLine(base::CommandLine* command_line) override {\n if (HasFlag(kSmallWindow)) {\n command_line->AppendSwitchASCII(switches::kWindowSize, \"800,600\");\n } else {\n command_line->AppendSwitchASCII(switches::kWindowSize, \"2000,1500\");\n }\n\n TabCapturePerformanceTestBase::SetUpCommandLine(command_line);\n }\n\n // Analyze and print the mean and stddev of how often events having the name\n // |event_name| occur.\n bool PrintRateResults(trace_analyzer::TraceAnalyzer* analyzer,\n const std::string& event_name) {\n trace_analyzer::TraceEventVector events;\n QueryTraceEvents(analyzer, event_name, &events);\n\n // Ignore some events for startup/setup/caching/teardown.\n const int trim_count = is_full_performance_run() ? 
kTrimEvents : 0;\n if (static_cast<int>(events.size()) < trim_count * 2) {\n LOG(ERROR) << \"Fewer events for \" << event_name\n << \" than would be trimmed: \" << events.size();\n return false;\n }\n trace_analyzer::TraceEventVector rate_events(events.begin() + trim_count,\n events.end() - trim_count);\n trace_analyzer::RateStats stats;\n const bool have_rate_stats = GetRateStats(rate_events, &stats, nullptr);\n double mean_ms = stats.mean_us / 1000.0;\n double std_dev_ms = stats.standard_deviation_us / 1000.0;\n std::string mean_and_error = base::StringPrintf(\"%f,%f\", mean_ms,\n std_dev_ms);\n auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());\n reporter.AddResultMeanAndError(GetMetricFromEventName(event_name),\n mean_and_error);\n return have_rate_stats;\n }\n\n // Analyze and print the mean and stddev of the amount of time between the\n // begin and end timestamps of each event having the name |event_name|.\n bool PrintLatencyResults(trace_analyzer::TraceAnalyzer* analyzer,\n const std::string& event_name) {\n trace_analyzer::TraceEventVector events;\n QueryTraceEvents(analyzer, event_name, &events);\n\n // Ignore some events for startup/setup/caching/teardown.\n const int trim_count = is_full_performance_run() ? kTrimEvents : 0;\n if (static_cast<int>(events.size()) < trim_count * 2) {\n LOG(ERROR) << \"Fewer events for \" << event_name\n << \" than would be trimmed: \" << events.size();\n return false;\n }\n trace_analyzer::TraceEventVector events_to_analyze(\n events.begin() + trim_count, events.end() - trim_count);\n\n // Compute mean and standard deviation of all capture latencies.\n double sum = 0.0;\n double sqr_sum = 0.0;\n int count = 0;\n for (const auto* begin_event : events_to_analyze) {\n const auto* end_event = begin_event->other_event.get();\n if (!end_event)\n continue;\n const double latency = end_event->timestamp - begin_event->timestamp;\n sum += latency;\n sqr_sum += latency * latency;\n ++count;\n }\n const double mean_us = (count == 0) ? NAN : (sum / count);\n const double std_dev_us =\n (count == 0)\n ? NAN\n : (sqrt(std::max(0.0, count * sqr_sum - sum * sum)) / count);\n auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());\n reporter.AddResultMeanAndError(\n GetMetricFromEventName(event_name + kEventSuffixLatency),\n base::StringPrintf(\"%f,%f\", mean_us / 1000.0, std_dev_us / 1000.0));\n return count > 0;\n }\n\n // Analyze and print the mean and stddev of how often events having the name\n // |event_name| are missing the success=true flag.\n bool PrintFailRateResults(trace_analyzer::TraceAnalyzer* analyzer,\n const std::string& event_name) {\n trace_analyzer::TraceEventVector events;\n QueryTraceEvents(analyzer, event_name, &events);\n\n // Ignore some events for startup/setup/caching/teardown.\n const int trim_count = is_full_performance_run() ? 
kTrimEvents : 0;\n if (static_cast<int>(events.size()) < trim_count * 2) {\n LOG(ERROR) << \"Fewer events for \" << event_name\n << \" than would be trimmed: \" << events.size();\n return false;\n }\n trace_analyzer::TraceEventVector events_to_analyze(\n events.begin() + trim_count, events.end() - trim_count);\n\n // Compute percentage of begin\u2192end events missing a success=true flag.\n // If there are no events to analyze, then the failure rate is 100%.\n double fail_percent = 100.0;\n if (!events_to_analyze.empty()) {\n int fail_count = 0;\n for (const auto* event : events_to_analyze) {\n if (!EventWasSuccessful(event)) {\n ++fail_count;\n }\n }\n fail_percent = 100.0 * static_cast<double>(fail_count) /\n static_cast<double>(events_to_analyze.size());\n }\n auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());\n reporter.AddResult(\n GetMetricFromEventName(event_name + kEventSuffixFailRate),\n fail_percent);\n return !events_to_analyze.empty();\n }\n\n protected:\n // The HTML test web page that draws animating balls continuously. Populated\n // in SetUp().\n std::string test_page_html_;\n};\n\n} // namespace\n\n#if BUILDFLAG(IS_CHROMEOS_ASH) && defined(MEMORY_SANITIZER)\n// Using MSAN on ChromeOS causes problems due to its hardware OpenGL library.\n#define MAYBE_Performance DISABLED_Performance\n#elif BUILDFLAG(IS_MAC)\n// flaky on Mac 10.11 See: http://crbug.com/1235358\n#define MAYBE_Performance DISABLED_Performance\n#else\n#define MAYBE_Performance Performance\n#endif\nIN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, MAYBE_Performance) {\n // Load the extension and test page, and tell the extension to start tab\n // capture.\n LoadExtension(GetApiTestDataDir()\n .AppendASCII(\"tab_capture\")\n .AppendASCII(\"perftest_extension\"));\n NavigateToTestPage(test_page_html_);\n const base::Value response = SendMessageToExtension(\n base::StringPrintf(\"{start:true, passThroughWebRTC:%s}\",\n HasFlag(kTestThroughWebRTC) ? \"true\" : \"false\"));\n const std::string* reason = response.FindStringKey(\"reason\");\n ASSERT_TRUE(response.FindBoolKey(\"success\").value_or(false))\n << (reason ? *reason : std::string(\"<MISSING REASON>\"));\n\n // Observe the running browser for a while, collecting a trace.\n std::unique_ptr<trace_analyzer::TraceAnalyzer> analyzer = TraceAndObserve(\n \"gpu,gpu.capture\",\n std::vector<base::StringPiece>{kEventCommitAndDrawCompositorFrame,\n kEventCapture},\n // In a full performance run, events will be trimmed from both ends of\n // trace. Otherwise, just require the bare-minimum to verify the stats\n // calculations will work.\n is_full_performance_run() ? (2 * kTrimEvents + kMinDataPointsForFullRun)\n : kMinDataPointsForQuickRun);\n\n // The printed result will be the average time between composites in the\n // renderer of the page being captured. This may not reach the full frame\n // rate if the renderer cannot draw as fast as is desired.\n //\n // Note that any changes to drawing or compositing in the renderer,\n // including changes to Blink (e.g., Canvas drawing), layout, etc.; will\n // have an impact on this result.\n EXPECT_FOR_PERFORMANCE_RUN(\n PrintRateResults(analyzer.get(), kEventCommitAndDrawCompositorFrame));\n\n // This prints out the average time between capture events in the browser\n // process. This should roughly match the renderer's draw+composite rate.\n EXPECT_FOR_PERFORMANCE_RUN(PrintRateResults(analyzer.get(), kEventCapture));\n\n // Analyze mean/stddev of the capture latency. 
This is a measure of how long\n // each capture took, from initiation until read-back from the GPU into a\n // media::VideoFrame was complete. Lower is better.\n EXPECT_FOR_PERFORMANCE_RUN(\n PrintLatencyResults(analyzer.get(), kEventCapture));\n\n // Analyze percentage of failed captures. This measures how often captures\n // were initiated, but not completed successfully. Lower is better, and zero\n // is ideal.\n EXPECT_FOR_PERFORMANCE_RUN(\n PrintFailRateResults(analyzer.get(), kEventCapture));\n}\n\n#if BUILDFLAG(IS_CHROMEOS_ASH)\n\n// On ChromeOS, software compositing is not an option.\nINSTANTIATE_TEST_SUITE_P(All,\n TabCapturePerformanceTest,\n testing::Values(kUseGpu,\n kTestThroughWebRTC | kUseGpu));\n\n#else\n\n// Run everything on non-ChromeOS platforms.\nINSTANTIATE_TEST_SUITE_P(All,\n TabCapturePerformanceTest,\n testing::Values(0,\n kUseGpu,\n kTestThroughWebRTC,\n kTestThroughWebRTC | kUseGpu));\n\n#endif // BUILDFLAG(IS_CHROMEOS_ASH)\n"
  },
  10. "written_data": {
  11. "chrome/browser/extensions/api/tab_capture/tab_capture_performancetest.cc": "// Copyright 2013 The Chromium Authors. All rights reserved.\n// Use of this source code is governed by a BSD-style license that can be\n// found in the LICENSE file.\n\n#include <cmath>\n#include <unordered_map>\n\n#include \"base/command_line.h\"\n#include \"base/files/file_util.h\"\n#include \"base/strings/stringprintf.h\"\n#include \"base/test/trace_event_analyzer.h\"\n#include \"build/build_config.h\"\n#include \"build/chromeos_buildflags.h\"\n#include \"chrome/browser/extensions/api/tab_capture/tab_capture_performance_test_base.h\"\n#include \"chrome/browser/extensions/extension_service.h\"\n#include \"chrome/browser/profiles/profile.h\"\n#include \"chrome/browser/ui/exclusive_access/fullscreen_controller.h\"\n#include \"chrome/common/chrome_switches.h\"\n#include \"chrome/test/base/in_process_browser_test.h\"\n#include \"chrome/test/base/test_launcher_utils.h\"\n#include \"chrome/test/base/test_switches.h\"\n#include \"chrome/test/base/tracing.h\"\n#include \"content/public/common/content_switches.h\"\n#include \"content/public/test/browser_test.h\"\n#include \"extensions/common/switches.h\"\n#include \"extensions/test/extension_test_message_listener.h\"\n#include \"testing/gtest/include/gtest/gtest.h\"\n#include \"testing/perf/perf_result_reporter.h\"\n#include \"ui/compositor/compositor_switches.h\"\n#include \"ui/gl/gl_switches.h\"\n\nnamespace {\n\n// Number of events to trim from the beginning and end. These events don't\n// contribute anything toward stable measurements: A brief moment of startup\n// \"jank\" is acceptable, and shutdown may result in missing events (since\n// render widget draws may stop before capture stops).\nconstexpr int kTrimEvents = 24; // 1 sec at 24fps, or 0.4 sec at 60 fps.\n\n// Minimum number of events required for a reasonable analysis.\nconstexpr int kMinDataPointsForFullRun = 100; // ~5 sec at 24fps.\n\n// Minimum number of events required for data analysis in a non-performance run.\nconstexpr int kMinDataPointsForQuickRun = 3;\n\nconstexpr char kMetricPrefixTabCapture[] = \"TabCapture.\";\nconstexpr char kMetricCaptureMs[] = \"capture\";\nconstexpr char kMetricCaptureFailRatePercent[] = \"capture_fail_rate\";\nconstexpr char kMetricCaptureLatencyMs[] = \"capture_latency\";\nconstexpr char kMetricRendererFrameDrawMs[] = \"renderer_frame_draw\";\n\nconstexpr char kEventCapture[] = \"Capture\";\nconstexpr char kEventSuffixFailRate[] = \"FailRate\";\nconstexpr char kEventSuffixLatency[] = \"Latency\";\nconstexpr char kEventCommitAndDrawCompositorFrame[] =\n \"WidgetBase::DidCommitAndDrawCompositorFrame\";\nconst std::unordered_map<std::string, std::string> kEventToMetricMap(\n {{kEventCapture, kMetricCaptureMs},\n {std::string(kEventCapture) + kEventSuffixFailRate,\n kMetricCaptureFailRatePercent},\n {std::string(kEventCapture) + kEventSuffixLatency,\n kMetricCaptureLatencyMs},\n {kEventCommitAndDrawCompositorFrame, kMetricRendererFrameDrawMs}});\n\nperf_test::PerfResultReporter SetUpTabCaptureReporter(\n const std::string& story) {\n perf_test::PerfResultReporter reporter(kMetricPrefixTabCapture, story);\n reporter.RegisterImportantMetric(kMetricCaptureMs, \"ms\");\n reporter.RegisterImportantMetric(kMetricCaptureFailRatePercent, \"percent\");\n reporter.RegisterImportantMetric(kMetricCaptureLatencyMs, \"ms\");\n reporter.RegisterImportantMetric(kMetricRendererFrameDrawMs, \"ms\");\n return reporter;\n}\n\nstd::string GetMetricFromEventName(const std::string& 
event_name) {\n auto iter = kEventToMetricMap.find(event_name);\n return iter == kEventToMetricMap.end() ? event_name : iter->second;\n}\n\n// A convenience macro to run a gtest expectation in the \"full performance run\"\n// setting, or else a warning that something is not being entirely tested in the\n// \"CQ run\" setting. This is required because the test runs in the CQ may not be\n// long enough to collect sufficient tracing data; and, unfortunately, there's\n// nothing we can do about that.\n#define EXPECT_FOR_PERFORMANCE_RUN(expr) \\\n do { \\\n if (is_full_performance_run()) { \\\n EXPECT_TRUE(expr); \\\n } else if (!(expr)) { \\\n LOG(WARNING) << \"Allowing failure: \" << #expr; \\\n } \\\n } while (false)\n\nenum TestFlags {\n kUseGpu = 1 << 0, // Only execute test if --enable-gpu was given\n // on the command line. This is required for\n // tests that run on GPU.\n kTestThroughWebRTC = 1 << 3, // Send video through a webrtc loopback.\n kSmallWindow = 1 << 4, // Window size: 1 = 800x600, 0 = 2000x1000\n};\n\n// Perfetto trace events should have a \"success\" that is either on\n// the beginning or end event.\nbool EventWasSuccessful(const trace_analyzer::TraceEvent* event) {\n double result;\n // First case: the begin event had a success.\n if (event->GetArgAsNumber(\"success\", &result) && result > 0.0) {\n return true;\n }\n\n // Second case: the end event had a success.\n if (event->other_event &&\n event->other_event->GetArgAsNumber(\"success\", &result) && result > 0.0) {\n return true;\n }\n\n return false;\n}\nclass TabCapturePerformanceTest : public TabCapturePerformanceTestBase,\n public testing::WithParamInterface<int> {\n public:\n TabCapturePerformanceTest() = default;\n ~TabCapturePerformanceTest() override = default;\n\n bool HasFlag(TestFlags flag) const {\n return (GetParam() & flag) == flag;\n }\n\n std::string GetSuffixForTestFlags() const {\n std::string suffix;\n if (HasFlag(kUseGpu))\n suffix += \"_comp_gpu\";\n if (HasFlag(kTestThroughWebRTC))\n suffix += \"_webrtc\";\n if (HasFlag(kSmallWindow))\n suffix += \"_small\";\n // Make sure we always have a story.\n if (suffix.size() == 0) {\n suffix = \"_baseline_story\";\n }\n // Strip off the leading _.\n suffix.erase(0, 1);\n return suffix;\n }\n\n void SetUp() override {\n const base::FilePath test_file = GetApiTestDataDir()\n .AppendASCII(\"tab_capture\")\n .AppendASCII(\"balls.html\");\n const bool success = base::ReadFileToString(test_file, &test_page_html_);\n CHECK(success) << \"Failed to load test page at: \"\n << test_file.AsUTF8Unsafe();\n\n if (!HasFlag(kUseGpu))\n UseSoftwareCompositing();\n\n TabCapturePerformanceTestBase::SetUp();\n }\n\n void SetUpCommandLine(base::CommandLine* command_line) override {\n if (HasFlag(kSmallWindow)) {\n command_line->AppendSwitchASCII(switches::kWindowSize, \"800,600\");\n } else {\n command_line->AppendSwitchASCII(switches::kWindowSize, \"2000,1500\");\n }\n\n TabCapturePerformanceTestBase::SetUpCommandLine(command_line);\n }\n\n // Analyze and print the mean and stddev of how often events having the name\n // |event_name| occur.\n bool PrintRateResults(trace_analyzer::TraceAnalyzer* analyzer,\n const std::string& event_name) {\n trace_analyzer::TraceEventVector events;\n QueryTraceEvents(analyzer, event_name, &events);\n\n // Ignore some events for startup/setup/caching/teardown.\n const int trim_count = is_full_performance_run() ? 
kTrimEvents : 0;\n if (static_cast<int>(events.size()) < trim_count * 2) {\n LOG(ERROR) << \"Fewer events for \" << event_name\n << \" than would be trimmed: \" << events.size();\n return false;\n }\n trace_analyzer::TraceEventVector rate_events(events.begin() + trim_count,\n events.end() - trim_count);\n trace_analyzer::RateStats stats;\n const bool have_rate_stats = GetRateStats(rate_events, &stats, nullptr);\n double mean_ms = stats.mean_us / 1000.0;\n double std_dev_ms = stats.standard_deviation_us / 1000.0;\n std::string mean_and_error = base::StringPrintf(\"%f,%f\", mean_ms,\n std_dev_ms);\n auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());\n reporter.AddResultMeanAndError(GetMetricFromEventName(event_name),\n mean_and_error);\n return have_rate_stats;\n }\n\n // Analyze and print the mean and stddev of the amount of time between the\n // begin and end timestamps of each event having the name |event_name|.\n bool PrintLatencyResults(trace_analyzer::TraceAnalyzer* analyzer,\n const std::string& event_name) {\n trace_analyzer::TraceEventVector events;\n QueryTraceEvents(analyzer, event_name, &events);\n\n // Ignore some events for startup/setup/caching/teardown.\n const int trim_count = is_full_performance_run() ? kTrimEvents : 0;\n if (static_cast<int>(events.size()) < trim_count * 2) {\n LOG(ERROR) << \"Fewer events for \" << event_name\n << \" than would be trimmed: \" << events.size();\n return false;\n }\n trace_analyzer::TraceEventVector events_to_analyze(\n events.begin() + trim_count, events.end() - trim_count);\n\n // Compute mean and standard deviation of all capture latencies.\n double sum = 0.0;\n double sqr_sum = 0.0;\n int count = 0;\n for (const auto* begin_event : events_to_analyze) {\n const auto* end_event = begin_event->other_event.get();\n if (!end_event)\n continue;\n const double latency = end_event->timestamp - begin_event->timestamp;\n sum += latency;\n sqr_sum += latency * latency;\n ++count;\n }\n const double mean_us = (count == 0) ? NAN : (sum / count);\n const double std_dev_us =\n (count == 0)\n ? NAN\n : (sqrt(std::max(0.0, count * sqr_sum - sum * sum)) / count);\n auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());\n reporter.AddResultMeanAndError(\n GetMetricFromEventName(event_name + kEventSuffixLatency),\n base::StringPrintf(\"%f,%f\", mean_us / 1000.0, std_dev_us / 1000.0));\n return count > 0;\n }\n\n // Analyze and print the mean and stddev of how often events having the name\n // |event_name| are missing the success=true flag.\n bool PrintFailRateResults(trace_analyzer::TraceAnalyzer* analyzer,\n const std::string& event_name) {\n trace_analyzer::TraceEventVector events;\n QueryTraceEvents(analyzer, event_name, &events);\n\n // Ignore some events for startup/setup/caching/teardown.\n const int trim_count = is_full_performance_run() ? 
kTrimEvents : 0;\n if (static_cast<int>(events.size()) < trim_count * 2) {\n LOG(ERROR) << \"Fewer events for \" << event_name\n << \" than would be trimmed: \" << events.size();\n return false;\n }\n trace_analyzer::TraceEventVector events_to_analyze(\n events.begin() + trim_count, events.end() - trim_count);\n\n // Compute percentage of begin\u2192end events missing a success=true flag.\n // If there are no events to analyze, then the failure rate is 100%.\n double fail_percent = 100.0;\n if (!events_to_analyze.empty()) {\n int fail_count = 0;\n for (const auto* event : events_to_analyze) {\n if (!EventWasSuccessful(event)) {\n ++fail_count;\n }\n }\n fail_percent = 100.0 * static_cast<double>(fail_count) /\n static_cast<double>(events_to_analyze.size());\n }\n auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());\n reporter.AddResult(\n GetMetricFromEventName(event_name + kEventSuffixFailRate),\n fail_percent);\n return !events_to_analyze.empty();\n }\n\n protected:\n // The HTML test web page that draws animating balls continuously. Populated\n // in SetUp().\n std::string test_page_html_;\n};\n\n} // namespace\n\nIN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, DISABLED_Performance) {\n // Load the extension and test page, and tell the extension to start tab\n // capture.\n LoadExtension(GetApiTestDataDir()\n .AppendASCII(\"tab_capture\")\n .AppendASCII(\"perftest_extension\"));\n NavigateToTestPage(test_page_html_);\n const base::Value response = SendMessageToExtension(\n base::StringPrintf(\"{start:true, passThroughWebRTC:%s}\",\n HasFlag(kTestThroughWebRTC) ? \"true\" : \"false\"));\n const std::string* reason = response.FindStringKey(\"reason\");\n ASSERT_TRUE(response.FindBoolKey(\"success\").value_or(false))\n << (reason ? *reason : std::string(\"<MISSING REASON>\"));\n\n // Observe the running browser for a while, collecting a trace.\n std::unique_ptr<trace_analyzer::TraceAnalyzer> analyzer = TraceAndObserve(\n \"gpu,gpu.capture\",\n std::vector<base::StringPiece>{kEventCommitAndDrawCompositorFrame,\n kEventCapture},\n // In a full performance run, events will be trimmed from both ends of\n // trace. Otherwise, just require the bare-minimum to verify the stats\n // calculations will work.\n is_full_performance_run() ? (2 * kTrimEvents + kMinDataPointsForFullRun)\n : kMinDataPointsForQuickRun);\n\n // The printed result will be the average time between composites in the\n // renderer of the page being captured. This may not reach the full frame\n // rate if the renderer cannot draw as fast as is desired.\n //\n // Note that any changes to drawing or compositing in the renderer,\n // including changes to Blink (e.g., Canvas drawing), layout, etc.; will\n // have an impact on this result.\n EXPECT_FOR_PERFORMANCE_RUN(\n PrintRateResults(analyzer.get(), kEventCommitAndDrawCompositorFrame));\n\n // This prints out the average time between capture events in the browser\n // process. This should roughly match the renderer's draw+composite rate.\n EXPECT_FOR_PERFORMANCE_RUN(PrintRateResults(analyzer.get(), kEventCapture));\n\n // Analyze mean/stddev of the capture latency. This is a measure of how long\n // each capture took, from initiation until read-back from the GPU into a\n // media::VideoFrame was complete. Lower is better.\n EXPECT_FOR_PERFORMANCE_RUN(\n PrintLatencyResults(analyzer.get(), kEventCapture));\n\n // Analyze percentage of failed captures. This measures how often captures\n // were initiated, but not completed successfully. 
Lower is better, and zero\n // is ideal.\n EXPECT_FOR_PERFORMANCE_RUN(\n PrintFailRateResults(analyzer.get(), kEventCapture));\n}\n\n#if BUILDFLAG(IS_CHROMEOS_ASH)\n\n// On ChromeOS, software compositing is not an option.\nINSTANTIATE_TEST_SUITE_P(All,\n TabCapturePerformanceTest,\n testing::Values(kUseGpu,\n kTestThroughWebRTC | kUseGpu));\n\n#else\n\n// Run everything on non-ChromeOS platforms.\nINSTANTIATE_TEST_SUITE_P(All,\n TabCapturePerformanceTest,\n testing::Values(0,\n kUseGpu,\n kTestThroughWebRTC,\n kTestThroughWebRTC | kUseGpu));\n\n#endif // BUILDFLAG(IS_CHROMEOS_ASH)\n"
  }
}