# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Methods related to querying the ResultDB BigQuery tables."""

import json
import logging
import math
import multiprocessing.pool
import os
import subprocess
import threading
import time
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union

import six

from typ import expectations_parser
from typ import json_results
from unexpected_passes_common import builders as builders_module
from unexpected_passes_common import constants
from unexpected_passes_common import data_types
from unexpected_passes_common import multiprocessing_utils

DEFAULT_NUM_SAMPLES = 100
MAX_ROWS = (2**31) - 1
MAX_QUERY_TRIES = 3

# Used to prevent us from triggering too many queries simultaneously and
# causing a bunch of rate limit errors. Anything below 1.5 seemed to result in
# enough rate limit errors to cause problems. Raising above that for safety.
QUERY_DELAY = 2

# The target number of results/rows per query when running in large query
# mode. Higher values = longer individual query times and higher chances of
# running out of memory in BigQuery. Lower values = more parallelization
# overhead and more issues with rate limit errors.
TARGET_RESULTS_PER_QUERY = 20000

# Subquery for getting all try builds that were used for CL submission. 30
# days is chosen because the ResultDB tables we pull data from only keep data
# around for 30 days.
SUBMITTED_BUILDS_TEMPLATE = """\
SELECT
  CONCAT("build-", CAST(unnested_builds.id AS STRING)) as id
FROM
  `commit-queue.{project_view}.attempts`,
  UNNEST(builds) as unnested_builds,
  UNNEST(gerrit_changes) as unnested_changes
WHERE
  unnested_builds.host = "cr-buildbucket.appspot.com"
  AND unnested_changes.submit_status = "SUCCESS"
  AND start_time > TIMESTAMP_SUB(CURRENT_TIMESTAMP(),
                                 INTERVAL 30 DAY)"""
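
# For example, SUBMITTED_BUILDS_TEMPLATE.format(project_view='chromium')
# ('chromium' is an illustrative project view name) yields a complete subquery
# against the `commit-queue.chromium.attempts` table.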

QueryResult = Dict[str, Any]
QueryParameters = Dict[str, Dict[str, Any]]
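# For example, the QueryParameters value {'INT64': {'num_builds': 5}} is
# expanded by GenerateBigQueryCommand() into --parameter=num_builds:INT64:5.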


# pylint: disable=super-with-arguments,useless-object-inheritance
class BigQueryQuerier(object):
  """Class to handle all BigQuery queries for a script invocation."""

  def __init__(self, suite: Optional[str], project: str, num_samples: int,
               large_query_mode: bool):
    """
    Args:
      suite: A string containing the name of the suite that is being queried
          for. Can be None if there is no differentiation between different
          suites.
      project: A string containing the billing project to use for BigQuery.
      num_samples: An integer containing the number of builds to pull results
          from.
      large_query_mode: A boolean indicating whether large query mode should
          be used. In this mode, an initial, smaller query is made and its
          results are used to perform additional filtering on a second,
          larger query in BigQuery. This works around hitting a hard memory
          limit when running the ORDER BY clause.
    """
    self._suite = suite
    self._project = project
    self._num_samples = num_samples or DEFAULT_NUM_SAMPLES
    self._large_query_mode = large_query_mode

    assert self._num_samples > 0

  def FillExpectationMapForBuilders(
      self, expectation_map: data_types.TestExpectationMap,
      builders: Iterable[data_types.BuilderEntry]
  ) -> Dict[str, data_types.ResultListType]:
    """Fills |expectation_map| with results from |builders|.

    Args:
      expectation_map: A data_types.TestExpectationMap. Will be modified
          in-place.
      builders: An iterable of data_types.BuilderEntry containing the builders
          to query.

    Returns:
      A dict containing any results that were retrieved that did not have a
      matching expectation in |expectation_map| in the following format:
      {
        |builder_type|:|builder_name| (str): [
          result1 (data_types.Result),
          result2 (data_types.Result),
          ...
        ],
      }
    """
    assert isinstance(expectation_map, data_types.TestExpectationMap)

    # Ensure that all the builders are of the same type since we make some
    # assumptions about that later on.
    assert builders
    builder_type = None
    for b in builders:
      if builder_type is None:
        builder_type = b.builder_type
      else:
        assert b.builder_type == builder_type

    # Filter out any builders that we can easily determine do not currently
    # produce data we care about.
    builders = self._FilterOutInactiveBuilders(builders, builder_type)

    # Spin up a separate process for each query/add step. This is wasteful in
    # the sense that we'll have a bunch of idle processes once faster steps
    # start finishing, but ensures that we start slow queries early and avoids
    # the overhead of passing large amounts of data between processes. See
    # crbug.com/1182459 for more information on performance considerations.
    process_pool = multiprocessing_utils.GetProcessPool(nodes=len(builders))

    args = [(b, expectation_map) for b in builders]
    results = process_pool.map(self._QueryAddCombined, args)

    tmp_expectation_map = data_types.TestExpectationMap()
    all_unmatched_results = {}

    for (unmatched_results, prefixed_builder_name, merge_map) in results:
      tmp_expectation_map.Merge(merge_map, expectation_map)
      if unmatched_results:
        all_unmatched_results[prefixed_builder_name] = unmatched_results

    expectation_map.clear()
    expectation_map.update(tmp_expectation_map)

    return all_unmatched_results

  def _FilterOutInactiveBuilders(self,
                                 builders: Iterable[data_types.BuilderEntry],
                                 builder_type: str
                                 ) -> List[data_types.BuilderEntry]:
    """Filters out any builders that are not producing data.

    This helps save time on querying, as querying for the builder names is
    cheap while querying for individual results from a builder is expensive.
    Filtering out inactive builders lets us preemptively remove builders that
    we know we won't get any data from, and thus don't need to waste time
    querying.

    Args:
      builders: An iterable of data_types.BuilderEntry containing the builders
          to query.
      builder_type: A string containing the type of builder to query, either
          "ci" or "try".

    Returns:
      A copy of |builders| with any inactive builders removed.
    """
    include_internal_builders = any(b.is_internal_builder for b in builders)
    query = self._GetActiveBuilderQuery(
        builder_type, include_internal_builders).encode('utf-8')
    cmd = GenerateBigQueryCommand(self._project, {}, batch=False)
    with open(os.devnull, 'w') as devnull:
      p = subprocess.Popen(cmd,
                           stdout=subprocess.PIPE,
                           stderr=devnull,
                           stdin=subprocess.PIPE)
      stdout, _ = p.communicate(query)
    if not isinstance(stdout, six.string_types):
      stdout = stdout.decode('utf-8')
    results = json.loads(stdout)

    # We filter from an initial list instead of directly using the returned
    # builders since there are cases where they aren't equivalent, such as for
    # GPU tests if a particular builder doesn't run a particular suite. This
    # could be encapsulated in the query, but this would cause the query to
    # take longer. Since generating the initial list locally is basically
    # instantaneous and we're optimizing for runtime, filtering is the better
    # option.
    active_builders = {r['builder_name'] for r in results}
    filtered_builders = [b for b in builders if b.name in active_builders]

    return filtered_builders

  def _QueryAddCombined(
      self,
      inputs: Tuple[data_types.BuilderEntry, data_types.TestExpectationMap]
  ) -> Tuple[data_types.ResultListType, str, data_types.TestExpectationMap]:
    """Combines the query and add steps for use in a process pool.

    Args:
      inputs: An iterable of inputs for QueryBuilder() and
          data_types.TestExpectationMap.AddResultList(). Should be in the
          order: builder, expectation_map.

    Returns:
      The output of data_types.TestExpectationMap.AddResultList().
    """
    builder, expectation_map = inputs
    results, expectation_files = self.QueryBuilder(builder)

    prefixed_builder_name = '%s/%s:%s' % (builder.project, builder.builder_type,
                                          builder.name)
    unmatched_results = expectation_map.AddResultList(prefixed_builder_name,
                                                      results,
                                                      expectation_files)

    return unmatched_results, prefixed_builder_name, expectation_map

  def QueryBuilder(self, builder: data_types.BuilderEntry
                   ) -> Tuple[data_types.ResultListType, Optional[List[str]]]:
    """Queries ResultDB for results from |builder|.

    Args:
      builder: A data_types.BuilderEntry containing the builder to query.

    Returns:
      A tuple (results, expectation_files). |results| is the results returned
      by the query converted into a list of data_types.Result objects.
      |expectation_files| is a set of strings denoting which expectation files
      are relevant to |results|, or None if all should be used.
    """
    query_generator = self._GetQueryGeneratorForBuilder(builder)
    if not query_generator:
      # No affected tests on this builder, so early return.
      return [], None

    # Query for the test data from the builder, splitting the query if we run
    # into the BigQuery hard memory limit. Even if we keep failing, this will
    # eventually stop due to getting a QuerySplitError when we can't split the
    # query any further.
    query_results = None
    while query_results is None:
      try:
        query_results = self._RunBigQueryCommandsForJsonOutput(
            query_generator.GetQueries(), {
                '': {
                    'builder_name': builder.name
                },
                'INT64': {
                    'num_builds': self._num_samples
                }
            })
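        # Note: the empty-string parameter type above relies on bq assuming
        # STRING when the type is omitted, i.e. this is passed along as
        # --parameter=builder_name::<name>.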
      except MemoryLimitError:
        logging.warning(
            'Query to builder %s hit BigQuery hard memory limit, trying again '
            'with more query splitting.', builder.name)
        query_generator.SplitQuery()

    results = []
    if not query_results:
      # Don't bother logging if we know this is a fake CI builder.
      if not (builder.builder_type == constants.BuilderTypes.CI
              and builder in builders_module.GetInstance().GetFakeCiBuilders()):
        logging.warning(
            'Did not get results for "%s", but this may be because its '
            'results do not apply to any expectations for this suite.',
            builder.name)
      return results, None

    # It's possible that a builder runs multiple versions of a test with
    # different expectation files for each version. So, find a result for each
    # unique step and get the expectation files from all of them.
    results_for_each_step = {}
    for qr in query_results:
      step_name = qr['step_name']
      if step_name not in results_for_each_step:
        results_for_each_step[step_name] = qr

    expectation_files = []
    for qr in results_for_each_step.values():
      # None is a special value indicating "use all expectation files", so
      # handle that.
      ef = self._GetRelevantExpectationFilesForQueryResult(qr)
      if ef is None:
        expectation_files = None
        break
      expectation_files.extend(ef)
    if expectation_files is not None:
      expectation_files = list(set(expectation_files))

    for r in query_results:
      if self._ShouldSkipOverResult(r):
        continue
      results.append(self._ConvertJsonResultToResultObject(r))

    logging.debug('Got %d results for %s builder %s', len(results),
                  builder.builder_type, builder.name)
    return results, expectation_files

  def _ConvertJsonResultToResultObject(self, json_result: QueryResult
                                       ) -> data_types.Result:
    """Converts a single BigQuery JSON result to a data_types.Result.

    Args:
      json_result: A single row/result from BigQuery in JSON format.

    Returns:
      A data_types.Result object containing the information from
      |json_result|.
    """
    build_id = _StripPrefixFromBuildId(json_result['id'])
    test_name = self._StripPrefixFromTestId(json_result['test_id'])
    actual_result = _ConvertActualResultToExpectationFileFormat(
        json_result['status'])
    tags = json_result['typ_tags']
    step = json_result['step_name']
    return data_types.Result(test_name, tags, actual_result, step, build_id)

  def _GetRelevantExpectationFilesForQueryResult(self, query_result: QueryResult
                                                 ) -> Optional[Iterable[str]]:
    """Gets the relevant expectation file names for a given query result.

    Args:
      query_result: A dict containing a single row/result from a BigQuery
          query.

    Returns:
      An iterable of strings containing expectation file names that are
      relevant to |query_result|, or None if all expectation files should be
      considered relevant.
    """
    raise NotImplementedError()

  def _ShouldSkipOverResult(self, result: QueryResult) -> bool:
    """Whether |result| should be ignored and skipped over.

    Args:
      result: A dict containing a single BigQuery result row.

    Returns:
      True if the result should be skipped over/ignored, otherwise False.
    """
    del result
    return False

  def _GetQueryGeneratorForBuilder(self, builder: data_types.BuilderEntry
                                   ) -> Optional['BaseQueryGenerator']:
    """Returns a BaseQueryGenerator instance to only include relevant tests.

    Args:
      builder: A data_types.BuilderEntry containing the builder to query.

    Returns:
      None if the query returned no results. Otherwise, some instance of a
      BaseQueryGenerator.
    """
    raise NotImplementedError()

  def _RunBigQueryCommandsForJsonOutput(self, queries: Union[str, List[str]],
                                        parameters: QueryParameters
                                        ) -> List[QueryResult]:
    """Runs the given BigQuery queries and returns their outputs as JSON.

    Args:
      queries: A string containing a single valid BigQuery query to run or a
          list of such strings.
      parameters: A dict specifying parameters to substitute in the query in
          the format {type: {key: value}}. For example, the dict:
          {'INT64': {'num_builds': 5}}
          would result in --parameter=num_builds:INT64:5 being passed to
          BigQuery.

    Returns:
      The combined results of |queries| in JSON.
    """
    if isinstance(queries, str):
      queries = [queries]
    assert isinstance(queries, list)

    processes = set()
    processes_lock = threading.Lock()

    def run_cmd_in_thread(inputs: Tuple[List[str], str]) -> str:
      cmd, query = inputs
      query = query.encode('utf-8')
      with open(os.devnull, 'w') as devnull:
        with processes_lock:
          # Starting many queries at once causes us to hit rate limits much
          # more frequently, so stagger query starts to help avoid that.
          time.sleep(QUERY_DELAY)
          p = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=devnull,
                               stdin=subprocess.PIPE)
          processes.add(p)

        # We pass in the query via stdin instead of including it on the
        # commandline because we can run into command length issues in large
        # query mode.
        stdout, _ = p.communicate(query)
        if not isinstance(stdout, six.string_types):
          stdout = stdout.decode('utf-8')

        if p.returncode:
          # When running many queries in parallel, it's possible to hit the
          # rate limit for the account if we're unlucky, so try again if we
          # do.
          if 'Exceeded rate limits' in stdout:
            raise RateLimitError()
          error_msg = 'Error running command %s. stdout: %s' % (cmd, stdout)
          if 'memory' in stdout:
            raise MemoryLimitError(error_msg)
          raise RuntimeError(error_msg)
        return stdout

    def run_cmd(cmd: List[str], tries: int) -> List[str]:
      if tries >= MAX_QUERY_TRIES:
        raise RuntimeError('Query failed too many times, aborting')

      # We use a thread pool with a thread for each query/process instead of
      # just creating the processes due to guidance from the Python docs:
      # https://docs.python.org/3/library/subprocess.html#subprocess.Popen.stderr
      # We need to write to stdin to pass the query in, but using
      # stdout/stderr/stdin directly is discouraged due to the potential for
      # deadlocks. The suggested method (using .communicate()) blocks, so we
      # need the thread pool to maintain parallelism.
      pool = multiprocessing.pool.ThreadPool(len(queries))

      def cleanup():
        pool.terminate()
        for p in processes:
          try:
            p.terminate()
          except OSError:
            # We can fail to terminate if the process is already finished, so
            # ignore such failures.
            pass
        processes.clear()

      args = [(cmd, q) for q in queries]
      try:
        return pool.map(run_cmd_in_thread, args)
      except RateLimitError:
        logging.warning('Query hit rate limit, retrying')
        cleanup()
        return run_cmd(cmd, tries + 1)
      finally:
        cleanup()
      raise RuntimeError('Hit branch that should be unreachable')

    bq_cmd = GenerateBigQueryCommand(self._project, parameters)
    stdouts = run_cmd(bq_cmd, 0)
    combined_json = []
    for result in [json.loads(s) for s in stdouts]:
      for row in result:
        combined_json.append(row)
    return combined_json

  def _StripPrefixFromTestId(self, test_id: str) -> str:
    """Strips the prefix from a test ID, leaving only the test case name.

    Args:
      test_id: A string containing a full ResultDB test ID, e.g.
          ninja://target/directory.suite.class.test_case

    Returns:
      A string containing the test case name extracted from |test_id|.
    """
    raise NotImplementedError()

  def _GetActiveBuilderQuery(self, builder_type: str,
                             include_internal_builders: bool) -> str:
    """Gets the SQL query for determining which builders actually produce data.

    Args:
      builder_type: A string containing the type of builders to query, either
          "ci" or "try".
      include_internal_builders: A boolean indicating whether internal
          builders should be included in the data that the query will access.

    Returns:
      A string containing a SQL query that will get the names of all relevant
      builders that are active/producing data.
    """
    raise NotImplementedError()


class BaseQueryGenerator(object):
  """Abstract base class for query generators."""

  def __init__(self, builder: data_types.BuilderEntry):
    self._builder = builder

  def SplitQuery(self) -> None:
    """Splits the query into more clauses/queries."""
    raise NotImplementedError('SplitQuery must be overridden in a child class')

  def GetClauses(self) -> List[str]:
    """Gets string representations of the test filters.

    Returns:
      A list of strings, each string being a valid SQL clause that applies a
      portion of the test filter to a query.
    """
    raise NotImplementedError('GetClauses must be overridden in a child class')

  def GetQueries(self) -> List[str]:
    """Gets string representations of the queries to run.

    Returns:
      A list of strings, each string being a valid SQL query that queries a
      portion of the tests of interest.
    """
    raise NotImplementedError('GetQueries must be overridden in a child class')


# pylint: disable=abstract-method
class FixedQueryGenerator(BaseQueryGenerator):
  """Concrete test filter that cannot be split."""

  def __init__(self, builder: data_types.BuilderEntry, test_filter: str):
    """
    Args:
      test_filter: A string containing the test filter SQL clause to use.
    """
    super(FixedQueryGenerator, self).__init__(builder)
    self._test_filter = test_filter

  def SplitQuery(self) -> None:
    raise QuerySplitError('Tried to split a query without any test IDs to '
                          'use, use --large-query-mode')

  def GetClauses(self) -> List[str]:
    return [self._test_filter]
# pylint: enable=abstract-method


# pylint: disable=abstract-method
class SplitQueryGenerator(BaseQueryGenerator):
  """Concrete test filter that can be split to a desired size."""

  def __init__(self, builder: data_types.BuilderEntry, test_ids: List[str],
               target_num_samples: int):
    """
    Args:
      test_ids: A list of strings containing the test IDs to use in the test
          filter.
      target_num_samples: The target/max number of samples to get from each
          query that uses clauses from this test filter.
    """
    super(SplitQueryGenerator, self).__init__(builder)
    self._test_id_lists = []
    self._target_num_samples = target_num_samples
    self._clauses = []
    self._PerformInitialSplit(test_ids)

  def _PerformInitialSplit(self, test_ids: List[str]) -> None:
    """Evenly splits |test_ids| into lists ~|_target_num_samples| long.

    Only to be called from the constructor.

    Args:
      test_ids: A list of test IDs to split and assign to the _test_id_lists
          member.
    """
    assert isinstance(test_ids[0], six.string_types)

    num_lists = int(math.ceil(float(len(test_ids)) / self._target_num_samples))
    list_size = int(math.ceil(float(len(test_ids)) / num_lists))
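    # For example, 250 test IDs with a target of 100 samples per query gives
    # num_lists = ceil(250 / 100) = 3 and list_size = ceil(250 / 3) = 84,
    # producing lists of 84, 84, and 82 IDs below.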
    split_lists = []
    start = 0
    for _ in range(num_lists):
      end = min(len(test_ids), start + list_size)
      split_lists.append(test_ids[start:end])
      start = end
    self._test_id_lists = split_lists
    self._GenerateClauses()

  def _GenerateClauses(self) -> None:
    test_filter_clauses = []
    for id_list in self._test_id_lists:
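      # The IDs are assumed to already be quoted SQL string literals, so
      # joining them directly yields, e.g., AND test_id IN UNNEST(["a", "b"]).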
      clause = 'AND test_id IN UNNEST([%s])' % ', '.join(id_list)
      test_filter_clauses.append(clause)
    self._clauses = test_filter_clauses

  def SplitQuery(self) -> None:
    def _SplitListInHalf(l: list) -> Tuple[list, list]:
      assert len(l) > 1
      front = l[:len(l) // 2]
      back = l[len(l) // 2:]
      return front, back
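
    # For example, a five-element list splits into a front of two and a back
    # of three, so each SplitQuery() call roughly doubles the number of lists.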
    tmp_test_id_lists = []
    for til in self._test_id_lists:
      if len(til) <= 1:
        raise QuerySplitError(
            'Cannot split query any further, try lowering --num-samples')
      front, back = _SplitListInHalf(til)
      tmp_test_id_lists.append(front)
      tmp_test_id_lists.append(back)
    self._test_id_lists = tmp_test_id_lists
    self._GenerateClauses()

  def GetClauses(self) -> List[str]:
    return self._clauses
# pylint: enable=abstract-method


def GenerateBigQueryCommand(project: str,
                            parameters: QueryParameters,
                            batch: bool = True) -> List[str]:
  """Generates a BigQuery commandline.

  Does not contain the actual query, as that is passed in via stdin.

  Args:
    project: A string containing the billing project to use for BigQuery.
    parameters: A dict specifying parameters to substitute in the query in
        the format {type: {key: value}}. For example, the dict:
        {'INT64': {'num_builds': 5}}
        would result in --parameter=num_builds:INT64:5 being passed to
        BigQuery.
    batch: Whether to run the query in batch mode or not. Batching adds some
        random amount of overhead since it means the query has to wait for
        idle resources, but also allows for much better parallelism.

  Returns:
    A list containing the BigQuery commandline, suitable to be passed to a
    method from the subprocess module.
  """
  cmd = [
      'bq',
      'query',
      '--max_rows=%d' % MAX_ROWS,
      '--format=json',
      '--project_id=%s' % project,
      '--use_legacy_sql=false',
  ]
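  # For example, project 'my-project' (an illustrative name) with parameters
  # {'INT64': {'num_builds': 5}} and batch=True produces:
  #   bq query --max_rows=2147483647 --format=json --project_id=my-project
  #       --use_legacy_sql=false --batch --parameter=num_builds:INT64:5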
  if batch:
    cmd.append('--batch')
  for parameter_type, parameter_pairs in parameters.items():
    for k, v in parameter_pairs.items():
      cmd.append('--parameter=%s:%s:%s' % (k, parameter_type, v))
  return cmd


def _StripPrefixFromBuildId(build_id: str) -> str:
  # Build IDs provided by ResultDB are prefixed with "build-".
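  # For example, "build-8812345" yields "8812345".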
  split_id = build_id.split('-')
  assert len(split_id) == 2
  return split_id[-1]


def _ConvertActualResultToExpectationFileFormat(actual_result: str) -> str:
  # Web tests use ResultDB's ABORT value for both test timeouts and device
  # failures, but Abort is not defined in typ. So, map it to timeout now.
  if actual_result == 'ABORT':
    actual_result = json_results.ResultType.Timeout
  # The result reported to ResultDB is in the format PASS/FAIL, while the
  # expected results in an expectation file are in the format Pass/Failure.
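  # For example, a reported "PASS" maps to "Pass", and a remapped "ABORT"
  # (now "TIMEOUT") maps to "Timeout".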
  return expectations_parser.RESULT_TAGS[actual_result]


class RateLimitError(Exception):
  """Exception raised when BigQuery hits a rate limit error."""


class MemoryLimitError(Exception):
  """Exception raised when BigQuery hits its hard memory limit."""


class QuerySplitError(Exception):
  """Exception raised when a query cannot be split any further."""