codeparser.py

#
# SPDX-License-Identifier: GPL-2.0-only
#

"""
BitBake code parser

Parses actual code (i.e. python and shell) for functions and in-line
expressions. Used mainly to determine dependencies on other functions
and variables within the BitBake metadata. Also provides a cache for
this information in order to speed up processing.

(Not to be confused with the code that parses the metadata itself,
see lib/bb/parse/ for that).

NOTE: if you change how the parsers gather information you will almost
certainly need to increment CodeParserCache.CACHE_VERSION below so that
any existing codeparser cache gets invalidated. Additionally you'll need
to increment __cache_version__ in cache.py in order to ensure that old
recipe caches don't trigger "Taskhash mismatch" errors.
"""

import ast
import sys
import codegen
import logging
import pickle
import bb.pysh as pysh
import os.path
import bb.utils, bb.data
import hashlib
from itertools import chain
from bb.pysh import pyshyacc, pyshlex, sherrors
from bb.cache import MultiProcessCache

logger = logging.getLogger('BitBake.CodeParser')

def bbhash(s):
    return hashlib.sha256(s.encode("utf-8")).hexdigest()

def check_indent(codestr):
    """If the code is indented, add a top level piece of code to 'remove' the indentation"""

    i = 0
    while codestr[i] in ["\n", "\t", " "]:
        i = i + 1

    if i == 0:
        return codestr

    if codestr[i-1] == "\t" or codestr[i-1] == " ":
        if codestr[0] == "\n":
            # Since we're adding a line, we need to remove one line of any empty padding
            # to ensure line numbers are correct
            codestr = codestr[1:]
        return "if 1:\n" + codestr

    return codestr
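# Illustrative example (not part of the upstream code): an indented fragment
# such as "    x = 1\n" would not compile on its own, but after check_indent()
# it becomes "if 1:\n    x = 1\n", which does. Unindented code is returned
# unchanged.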
# Basically pickle, in python 2.7.3 at least, does badly with data duplication
# upon pickling and unpickling. Combine this with duplicate objects and things
# are a mess.
#
# When the sets are originally created, python calls intern() on the set keys
# which significantly improves memory usage. Sadly the pickle/unpickle process
# doesn't call intern() on the keys and results in the same strings being duplicated
# in memory. This also means pickle will save the same string multiple times in
# the cache file.
#
# By having shell and python cacheline objects with setstate/getstate, we force
# the object creation through our own routine where we can call intern (via internSet).
#
# We also use hashable frozensets and ensure we use references to these so that
# duplicates can be removed, both in memory and in the resulting pickled data.
#
# By playing these games, the size of the cache file shrinks dramatically
# meaning faster load times and the reloaded cache files also consume much less
# memory. Smaller cache files, faster load times and lower memory usage is good.
#
# A custom getstate/setstate using tuples is actually worth 15% cachesize by
# avoiding duplication of the attribute names!

class SetCache(object):
    def __init__(self):
        self.setcache = {}

    def internSet(self, items):
        new = []
        for i in items:
            new.append(sys.intern(i))
        s = frozenset(new)
        h = hash(s)
        if h in self.setcache:
            return self.setcache[h]
        self.setcache[h] = s
        return s

codecache = SetCache()
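# Illustrative example (not part of the upstream code): equal sets intern to
# the same frozenset object, so both in-memory data and the pickled cache carry
# only one copy:
#   a = codecache.internSet(["bindir", "libdir"])
#   b = codecache.internSet(["libdir", "bindir"])
#   assert a is b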
class pythonCacheLine(object):
    def __init__(self, refs, execs, contains):
        self.refs = codecache.internSet(refs)
        self.execs = codecache.internSet(execs)
        self.contains = {}
        for c in contains:
            self.contains[c] = codecache.internSet(contains[c])

    def __getstate__(self):
        return (self.refs, self.execs, self.contains)

    def __setstate__(self, state):
        (refs, execs, contains) = state
        self.__init__(refs, execs, contains)

    def __hash__(self):
        l = (hash(self.refs), hash(self.execs))
        for c in sorted(self.contains.keys()):
            l = l + (c, hash(self.contains[c]))
        return hash(l)

    def __repr__(self):
        return " ".join([str(self.refs), str(self.execs), str(self.contains)])


class shellCacheLine(object):
    def __init__(self, execs):
        self.execs = codecache.internSet(execs)

    def __getstate__(self):
        return (self.execs)

    def __setstate__(self, state):
        (execs) = state
        self.__init__(execs)

    def __hash__(self):
        return hash(self.execs)

    def __repr__(self):
        return str(self.execs)


class CodeParserCache(MultiProcessCache):
    cache_file_name = "bb_codeparser.dat"
    # NOTE: you must increment this if you change how the parsers gather information,
    # so that an existing cache gets invalidated. Additionally you'll need
    # to increment __cache_version__ in cache.py in order to ensure that old
    # recipe caches don't trigger "Taskhash mismatch" errors.
    CACHE_VERSION = 11

    def __init__(self):
        MultiProcessCache.__init__(self)
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]
        self.pythoncacheextras = self.cachedata_extras[0]
        self.shellcacheextras = self.cachedata_extras[1]

        # To avoid duplication in the codeparser cache, keep
        # a lookup of hashes of objects we already have
        self.pythoncachelines = {}
        self.shellcachelines = {}

    def newPythonCacheLine(self, refs, execs, contains):
        cacheline = pythonCacheLine(refs, execs, contains)
        h = hash(cacheline)
        if h in self.pythoncachelines:
            return self.pythoncachelines[h]
        self.pythoncachelines[h] = cacheline
        return cacheline

    def newShellCacheLine(self, execs):
        cacheline = shellCacheLine(execs)
        h = hash(cacheline)
        if h in self.shellcachelines:
            return self.shellcachelines[h]
        self.shellcachelines[h] = cacheline
        return cacheline

    def init_cache(self, d):
        # Check if we already have the caches
        if self.pythoncache:
            return

        MultiProcessCache.init_cache(self, d)

        # cachedata gets re-assigned in the parent
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]

    def create_cachedata(self):
        data = [{}, {}]
        return data

codeparsercache = CodeParserCache()

def parser_cache_init(d):
    codeparsercache.init_cache(d)

def parser_cache_save():
    codeparsercache.save_extras()

def parser_cache_savemerge():
    codeparsercache.save_merge()
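# Typical lifecycle (illustrative; the calls are made by BitBake's parsing
# machinery, not by this module): parser_cache_init(d) loads the on-disk cache
# before parsing starts, each parsing process calls parser_cache_save() to
# write its "extras", and parser_cache_savemerge() merges those back into the
# main bb_codeparser.dat afterwards.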
Logger = logging.getLoggerClass()
class BufferedLogger(Logger):
    def __init__(self, name, level=0, target=None):
        Logger.__init__(self, name)
        self.setLevel(level)
        self.buffer = []
        self.target = target

    def handle(self, record):
        self.buffer.append(record)

    def flush(self):
        for record in self.buffer:
            if self.target.isEnabledFor(record.levelno):
                self.target.handle(record)
        self.buffer = []
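# Illustrative example (not part of the upstream code): records are held in
# memory until flush(), so a parser can accumulate diagnostics and let the
# caller decide later whether to forward them to the real logger:
#   buffered = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, logger)
#   buffered.warning("problem parsing %s", "do_compile")  # buffered, not emitted
#   buffered.flush()  # forwarded via 'logger', subject to its level/handlers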
class PythonParser():
    getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional")
    getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
    containsfuncs = ("bb.utils.contains", "base_contains")
    containsanyfuncs = ("bb.utils.contains_any", "bb.utils.filter")
    execfuncs = ("bb.build.exec_func", "bb.build.exec_task")

    def warn(self, func, arg):
        """Warn about calls of bitbake APIs which pass a non-literal
        argument for the variable name, as we're not able to track such
        a reference.
        """

        try:
            funcstr = codegen.to_source(func)
            argstr = codegen.to_source(arg)
        except TypeError:
            self.log.debug(2, 'Failed to convert function and argument to source form')
        else:
            self.log.debug(1, self.unhandled_message % (funcstr, argstr))

    def visit_Call(self, node):
        name = self.called_node_name(node.func)
        if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
            if isinstance(node.args[0], ast.Str):
                varname = node.args[0].s
                if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
                    if varname not in self.contains:
                        self.contains[varname] = set()
                    self.contains[varname].add(node.args[1].s)
                elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Str):
                    if varname not in self.contains:
                        self.contains[varname] = set()
                    self.contains[varname].update(node.args[1].s.split())
                elif name.endswith(self.getvarflags):
                    if isinstance(node.args[1], ast.Str):
                        self.references.add('%s[%s]' % (varname, node.args[1].s))
                    else:
                        self.warn(node.func, node.args[1])
                else:
                    self.references.add(varname)
            else:
                self.warn(node.func, node.args[0])
        elif name and name.endswith(".expand"):
            if isinstance(node.args[0], ast.Str):
                value = node.args[0].s
                d = bb.data.init()
                parser = d.expandWithRefs(value, self.name)
                self.references |= parser.references
                self.execs |= parser.execs
                for varname in parser.contains:
                    if varname not in self.contains:
                        self.contains[varname] = set()
                    self.contains[varname] |= parser.contains[varname]
        elif name in self.execfuncs:
            if isinstance(node.args[0], ast.Str):
                self.var_execs.add(node.args[0].s)
            else:
                self.warn(node.func, node.args[0])
        elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
            self.execs.add(name)

    def called_node_name(self, node):
        """Given a called node, return its original string form"""
        components = []
        while node:
            if isinstance(node, ast.Attribute):
                components.append(node.attr)
                node = node.value
            elif isinstance(node, ast.Name):
                components.append(node.id)
                return '.'.join(reversed(components))
            else:
                break
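    # Illustrative note (not part of the upstream code): for a call such as
    # oe.utils.conditional(...), node.func is a chain of ast.Attribute/ast.Name
    # nodes and called_node_name() returns the dotted string
    # "oe.utils.conditional"; a more dynamic callee (e.g. funcs[i](...)) breaks
    # out of the loop and the method implicitly returns None.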
    def __init__(self, name, log):
        self.name = name
        self.var_execs = set()
        self.contains = {}
        self.execs = set()
        self.references = set()
        self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log)

        self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
        self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)

    def parse_python(self, node, lineno=0, filename="<string>"):
        if not node or not node.strip():
            return

        h = bbhash(str(node))

        if h in codeparsercache.pythoncache:
            self.references = set(codeparsercache.pythoncache[h].refs)
            self.execs = set(codeparsercache.pythoncache[h].execs)
            self.contains = {}
            for i in codeparsercache.pythoncache[h].contains:
                self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
            return

        if h in codeparsercache.pythoncacheextras:
            self.references = set(codeparsercache.pythoncacheextras[h].refs)
            self.execs = set(codeparsercache.pythoncacheextras[h].execs)
            self.contains = {}
            for i in codeparsercache.pythoncacheextras[h].contains:
                self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
            return

        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        node = "\n" * int(lineno) + node
        code = compile(check_indent(str(node)), filename, "exec",
                       ast.PyCF_ONLY_AST)

        for n in ast.walk(code):
            if n.__class__.__name__ == "Call":
                self.visit_Call(n)

        self.execs.update(self.var_execs)

        codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
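# Illustrative usage (not part of the upstream code; results assume the snippet
# is not already cached):
#   p = PythonParser("do_install", logger)
#   p.parse_python('d.getVar("BINDIR")\nbb.build.exec_func("do_foo", d)')
#   p.references  ->  {"BINDIR"}
#   p.execs       ->  {"do_foo"}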
class ShellParser():
    def __init__(self, name, log):
        self.funcdefs = set()
        self.allexecs = set()
        self.execs = set()
        self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log)
        self.unhandled_template = "unable to handle non-literal command '%s'"
        self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template)

    def parse_shell(self, value):
        """Parse the supplied shell code in a string, returning the external
        commands it executes.
        """

        h = bbhash(str(value))

        if h in codeparsercache.shellcache:
            self.execs = set(codeparsercache.shellcache[h].execs)
            return self.execs

        if h in codeparsercache.shellcacheextras:
            self.execs = set(codeparsercache.shellcacheextras[h].execs)
            return self.execs

        self._parse_shell(value)
        self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)

        codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs)

        return self.execs

    def _parse_shell(self, value):
        try:
            tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
        except Exception:
            bb.error('Error during parse shell code, the last 5 lines are:\n%s' % '\n'.join(value.split('\n')[-5:]))
            raise

        self.process_tokens(tokens)

    def process_tokens(self, tokens):
        """Process a supplied portion of the syntax tree as returned by
        pyshyacc.parse.
        """

        def function_definition(value):
            self.funcdefs.add(value.name)
            return [value.body], None

        def case_clause(value):
            # Element 0 of each item in the case is the list of patterns, and
            # Element 1 of each item in the case is the list of commands to be
            # executed when that pattern matches.
            words = chain(*[item[0] for item in value.items])
            cmds = chain(*[item[1] for item in value.items])
            return cmds, words

        def if_clause(value):
            main = chain(value.cond, value.if_cmds)
            rest = value.else_cmds
            if isinstance(rest, tuple) and rest[0] == "elif":
                return chain(main, if_clause(rest[1]))
            else:
                return chain(main, rest)

        def simple_command(value):
            return None, chain(value.words, (assign[1] for assign in value.assigns))

        token_handlers = {
            "and_or": lambda x: ((x.left, x.right), None),
            "async": lambda x: ([x], None),
            "brace_group": lambda x: (x.cmds, None),
            "for_clause": lambda x: (x.cmds, x.items),
            "function_definition": function_definition,
            "if_clause": lambda x: (if_clause(x), None),
            "pipeline": lambda x: (x.commands, None),
            "redirect_list": lambda x: ([x.cmd], None),
            "subshell": lambda x: (x.cmds, None),
            "while_clause": lambda x: (chain(x.condition, x.cmds), None),
            "until_clause": lambda x: (chain(x.condition, x.cmds), None),
            "simple_command": simple_command,
            "case_clause": case_clause,
        }
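        # Descriptive note (added for clarity): each handler returns a
        # (more_tokens, words) pair: nested command lists to recurse into via
        # process_tokens(), and word lists to scan for command names via
        # process_words(); None means that half does not apply to the token.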
        def process_token_list(tokens):
            for token in tokens:
                if isinstance(token, list):
                    process_token_list(token)
                    continue
                name, value = token
                try:
                    more_tokens, words = token_handlers[name](value)
                except KeyError:
                    raise NotImplementedError("Unsupported token type " + name)

                if more_tokens:
                    self.process_tokens(more_tokens)

                if words:
                    self.process_words(words)

        process_token_list(tokens)

    def process_words(self, words):
        """Process a set of 'words' in pyshyacc parlance, which includes
        extraction of executed commands from $() blocks, as well as grabbing
        the command name argument.
        """

        words = list(words)
        for word in list(words):
            wtree = pyshlex.make_wordtree(word[1])
            for part in wtree:
                if not isinstance(part, list):
                    continue

                if part[0] in ('`', '$('):
                    command = pyshlex.wordtree_as_string(part[1:-1])
                    self._parse_shell(command)

                    if word[0] in ("cmd_name", "cmd_word"):
                        if word in words:
                            words.remove(word)

        usetoken = False
        for word in words:
            if word[0] in ("cmd_name", "cmd_word") or \
               (usetoken and word[0] == "TOKEN"):
                if "=" in word[1]:
                    usetoken = True
                    continue

                cmd = word[1]
                if cmd.startswith("$"):
                    self.log.debug(1, self.unhandled_template % cmd)
                elif cmd == "eval":
                    command = " ".join(word for _, word in words[1:])
                    self._parse_shell(command)
                else:
                    self.allexecs.add(cmd)
                break
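# Illustrative usage (not part of the upstream code; exact tokenisation is up
# to bb.pysh, so treat this as a sketch):
#   p = ShellParser("do_deploy", logger)
#   p.parse_shell('do_sub() { bbnote "sub"; }\ndo_sub\ninstall -d ${D}\necho $(date)')
#   -> returns a set including "bbnote", "install", "echo" and "date", while
#      "do_sub" is excluded because it is defined within the block rather than
#      being an external command.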