  1. """
  2. BitBake Utility Functions
  3. """
  4. # Copyright (C) 2004 Michael Lauer
  5. #
  6. # SPDX-License-Identifier: GPL-2.0-only
  7. #
  8. import re, fcntl, os, string, stat, shutil, time
  9. import sys
  10. import errno
  11. import logging
  12. import bb
  13. import bb.msg
  14. import multiprocessing
  15. import fcntl
  16. import importlib
  17. from importlib import machinery
  18. import itertools
  19. import subprocess
  20. import glob
  21. import fnmatch
  22. import traceback
  23. import errno
  24. import signal
  25. import collections
  26. import copy
  27. from subprocess import getstatusoutput
  28. from contextlib import contextmanager
  29. from ctypes import cdll
  30. logger = logging.getLogger("BitBake.Util")
  31. python_extensions = importlib.machinery.all_suffixes()
  32. def clean_context():
  33. return {
  34. "os": os,
  35. "bb": bb,
  36. "time": time,
  37. }
  38. def get_context():
  39. return _context
  40. def set_context(ctx):
  41. _context = ctx
  42. # Context used in better_exec, eval
  43. _context = clean_context()
  44. class VersionStringException(Exception):
  45. """Exception raised when an invalid version specification is found"""
  46. def explode_version(s):
  47. r = []
  48. alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
  49. numeric_regexp = re.compile(r'^(\d+)(.*)$')
  50. while (s != ''):
  51. if s[0] in string.digits:
  52. m = numeric_regexp.match(s)
  53. r.append((0, int(m.group(1))))
  54. s = m.group(2)
  55. continue
  56. if s[0] in string.ascii_letters:
  57. m = alpha_regexp.match(s)
  58. r.append((1, m.group(1)))
  59. s = m.group(2)
  60. continue
  61. if s[0] == '~':
  62. r.append((-1, s[0]))
  63. else:
  64. r.append((2, s[0]))
  65. s = s[1:]
  66. return r
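
# Illustrative example (not part of the original module): explode_version()
# tokenises a version string into (weight, value) pairs so digits compare
# numerically and '~' sorts before everything else, e.g.
#   explode_version("1.2b3~") -> [(0, 1), (2, '.'), (0, 2), (1, 'b'), (0, 3), (-1, '~')]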

def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR)"""
    s = s.strip(" <>=")
    e = 0
    if s.count(':'):
        e = int(s.split(":")[0])
        s = s.split(":")[1]
    r = ""
    if s.count('-'):
        r = s.rsplit("-", 1)[1]
        s = s.rsplit("-", 1)[0]
    v = s
    return (e, v, r)
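
# Illustrative example: split_version() separates epoch, version and revision, e.g.
#   split_version("1:2.3-r4") -> (1, '2.3', 'r4')
#   split_version("2.3")      -> (0, '2.3', '')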

def vercmp_part(a, b):
    va = explode_version(a)
    vb = explode_version(b)
    while True:
        if va == []:
            (oa, ca) = (0, None)
        else:
            (oa, ca) = va.pop(0)
        if vb == []:
            (ob, cb) = (0, None)
        else:
            (ob, cb) = vb.pop(0)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            return 0
        if oa < ob:
            return -1
        elif oa > ob:
            return 1
        elif ca is None:
            return -1
        elif cb is None:
            return 1
        elif ca < cb:
            return -1
        elif ca > cb:
            return 1

def vercmp(ta, tb):
    (ea, va, ra) = ta
    (eb, vb, rb) = tb
    r = int(ea or 0) - int(eb or 0)
    if (r == 0):
        r = vercmp_part(va, vb)
    if (r == 0):
        r = vercmp_part(ra, rb)
    return r

def vercmp_string(a, b):
    ta = split_version(a)
    tb = split_version(b)
    return vercmp(ta, tb)

def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    if op in ('=', '=='):
        return res == 0
    elif op == '<=':
        return res <= 0
    elif op == '>=':
        return res >= 0
    elif op in ('>', '>>'):
        return res > 0
    elif op in ('<', '<<'):
        return res < 0
    elif op == '!=':
        return res != 0
    else:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)
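
# Illustrative examples: vercmp_string() returns <0, 0 or >0 like strcmp, and
# vercmp_string_op() wraps it with an operator check, e.g.
#   vercmp_string("1.2", "1.10") < 0         -> True (numeric components compare numerically)
#   vercmp_string_op("1.0", "1.1", ">=")     -> False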

def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    r = []
    l = s.split()
    flag = False
    for i in l:
        if i[0] == '(':
            flag = True
            #j = []
        if not flag:
            r.append(i)
        #else:
        #    j.append(i)
        if flag and i.endswith(')'):
            flag = False
            # Ignore version
            #r[-1] += ' ' + ' '.join(j)
    return r
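
# Illustrative example: explode_deps() drops any version constraints, e.g.
#   explode_deps("foo (>= 1.0) bar") -> ['foo', 'bar']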

def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.
    """
    r = collections.OrderedDict()
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False
    inversion = False
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
                lastcmp = (i or "")
                i = ""

        i.strip()
        if not i:
            continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        #if not inversion:
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r
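
# Illustrative example: explode_dep_versions2() keeps the constraints, e.g.
#   explode_dep_versions2("foo (>= 1.0) bar") ->
#       OrderedDict([('bar', []), ('foo', ['>= 1.0'])])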

def explode_dep_versions(s):
    r = explode_dep_versions2(s)
    for d in r:
        if not r[d]:
            r[d] = None
            continue
        if len(r[d]) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
        r[d] = r[d][0]
    return r

def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string
    """
    result = []
    for dep in deps:
        if deps[dep]:
            if isinstance(deps[dep], list):
                for v in deps[dep]:
                    result.append(dep + " (" + v + ")")
            else:
                result.append(dep + " (" + deps[dep] + ")")
        else:
            result.append(dep)
    if commasep:
        return ", ".join(result)
    else:
        return " ".join(result)

def _print_trace(body, line):
    """
    Print the Environment of a Text Body
    """
    error = []
    # print the environment of the method
    min_line = max(1, line-4)
    max_line = min(line + 4, len(body))
    for i in range(min_line, max_line + 1):
        if line == i:
            error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
        else:
            error.append('     %.4d:%s' % (i, body[i-1].rstrip()))
    return error

def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        error.append("Error in compiling python function in %s, line %s:\n" % (realfile, e.lineno))
        if hasattr(e, "lineno"):
            error.append("The code lines resulting in this error were:")
            # e.lineno: line's position in realfile
            # lineno: function name's "position -1" in realfile
            # e.lineno - lineno: line's relative position in function
            error.extend(_print_trace(body, e.lineno - lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))
        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e

def _print_exception(t, value, tb, realfile, text, context):
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')
        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])

            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spawning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        logger.error("\n".join(error))

def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e2:
            logger.error("Exception handler error: %s" % str(e2))

        e = bb.BBHandledException(e)
        raise e

def simple_exec(code, context):
    exec(code, get_context(), context)

def better_eval(source, locals, extraglobals = None):
    ctx = get_context()
    if extraglobals:
        ctx = copy.copy(ctx)
        for g in extraglobals:
            ctx[g] = extraglobals[g]
    return eval(source, ctx, locals)

@contextmanager
def fileslocked(files):
    """Context manager for locking and unlocking file locks."""
    locks = []
    if files:
        for lockfile in files:
            locks.append(bb.utils.lockfile(lockfile))

    try:
        yield
    finally:
        for lock in locks:
            bb.utils.unlockfile(lock)
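
# Illustrative usage (hypothetical lock path, not part of the original module):
# serialise access to a shared resource by holding lock files for the duration
# of a with-block; the locks are released on exit, even if an exception occurs.
#
#   with bb.utils.fileslocked(["/tmp/example.lock"]):
#       pass  # critical section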

def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES:
                logger.error("Unable to acquire lock '%s', %s",
                             e.strerror, name)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None

def unlockfile(lf):
    """
    Unlock a file locked using lockfile()
    """
    try:
        # If we had a shared lock, we need to promote to exclusive before
        # removing the lockfile. Attempt this, ignore failures.
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()

def _hasher(method, filename):
    import mmap

    with open(filename, "rb") as f:
        try:
            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
                for chunk in iter(lambda: mm.read(8192), b''):
                    method.update(chunk)
        except ValueError:
            # You can't mmap() an empty file so silence this exception
            pass
    return method.hexdigest()

def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib
    return _hasher(hashlib.md5(), filename)

def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    import hashlib
    return _hasher(hashlib.sha256(), filename)

def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    import hashlib
    return _hasher(hashlib.sha1(), filename)

def sha384_file(filename):
    """
    Return the hex string representation of the SHA384 checksum of the filename
    """
    import hashlib
    return _hasher(hashlib.sha384(), filename)

def sha512_file(filename):
    """
    Return the hex string representation of the SHA512 checksum of the filename
    """
    import hashlib
    return _hasher(hashlib.sha512(), filename)

def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    return [
        'BB_TASKHASH',
        'HOME',
        'LOGNAME',
        'PATH',
        'PWD',
        'SHELL',
        'USER',
        'LC_ALL',
        'BBSERVER',
    ]

def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    v = [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_WHITELIST',
        'BB_ENV_EXTRAWHITE',
    ]
    return v + preserved_envvars_exported()

def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.
    """
    removed_vars = {}
    for key in list(os.environ):
        if key in good_vars:
            continue

        removed_vars[key] = os.environ[key]
        del os.environ[key]

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if removed_vars:
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars

def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()
    approved = []
    if 'BB_ENV_WHITELIST' in os.environ:
        approved = os.environ['BB_ENV_WHITELIST'].split()
        approved.extend(['BB_ENV_WHITELIST'])
    else:
        approved = preserved_envvars()
        if 'BB_ENV_EXTRAWHITE' in os.environ:
            approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
            if 'BB_ENV_EXTRAWHITE' not in approved:
                approved.extend(['BB_ENV_EXTRAWHITE'])
    return approved

def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.
    """
    if 'BB_PRESERVE_ENV' not in os.environ:
        good_vars = approved_variables()
        return filter_environment(good_vars)

    return {}

def empty_environment():
    """
    Remove all variables from the environment.
    """
    for s in list(os.environ.keys()):
        os.unsetenv(s)
        del os.environ[s]

def build_environment(d):
    """
    Build an environment from all exported variables.
    """
    import bb.data
    for var in bb.data.keys(d):
        export = d.getVarFlag(var, "export", False)
        if export:
            os.environ[var] = d.getVar(var) or ""

def _check_unsafe_delete_path(path):
    """
    Basic safeguard against recursively deleting something we shouldn't. If it returns True,
    the caller should raise an exception with an appropriate message.
    NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
    with potentially disastrous results.
    """
    extra = ''
    # HOME might not be /home/something, so in case we can get it, check against it
    homedir = os.environ.get('HOME', '')
    if homedir:
        extra = '|%s' % homedir
    if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
        return True
    return False

def remove(path, recurse=False, ionice=False):
    """Equivalent to rm -f or rm -rf"""
    if not path:
        return
    if recurse:
        for name in glob.glob(path):
            if _check_unsafe_delete_path(path):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
        # shutil.rmtree(name) would be ideal but it's too slow
        cmd = []
        if ionice:
            cmd = ['ionice', '-c', '3']
        subprocess.check_call(cmd + ['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise

def prunedir(topdir, ionice=False):
    # Delete everything reachable from the directory named in 'topdir'.
    # CAUTION: This is dangerous!
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    remove(topdir, recurse=True, ionice=ionice)

#
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
# but that's possibly insane and suffixes is probably going to be small
#
def prune_suffix(var, suffixes, d):
    # See if var ends with any of the suffixes listed and
    # remove it if found
    for suffix in suffixes:
        if suffix and var.endswith(suffix):
            return var[:-len(suffix)]
    return var

def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """
    try:
        os.makedirs(directory)
    except OSError as e:
        if e.errno != errno.EEXIST or not os.path.isdir(directory):
            raise e

def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure. Move is
    atomic.
    """
    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            os.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            if e.errno != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                os.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime

def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure

    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime

def break_hardlinks(src, sstat = None):
    """
    Ensures src is the only hardlink to this file. Other hardlinks,
    if any, are not affected (other than in their st_nlink value, of
    course). Returns true on success and false on failure.
    """
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
        return False
    if sstat[stat.ST_NLINK] == 1:
        return True
    return copyfile(src, src, sstat=sstat)

def which(path, item, direction = 0, history = False, executable=False):
    """
    Locate `item` in the list of paths `path` (colon separated string like $PATH).
    If `direction` is non-zero then the list is reversed.
    If `history` is True then the list of candidates also returned as result,history.
    If `executable` is True then the candidate has to be an executable file,
    otherwise the candidate simply has to exist.
    """
    if executable:
        is_candidate = lambda p: os.path.isfile(p) and os.access(p, os.X_OK)
    else:
        is_candidate = lambda p: os.path.exists(p)

    hist = []
    paths = (path or "").split(':')
    if direction != 0:
        paths.reverse()

    for p in paths:
        next = os.path.join(p, item)
        hist.append(next)
        if is_candidate(next):
            if not os.path.isabs(next):
                next = os.path.abspath(next)
            if history:
                return next, hist
            return next

    if history:
        return "", hist
    return ""

@contextmanager
def umask(new_mask):
    """
    Context manager to set the umask to a specific mask, and restore it afterwards.
    """
    current_mask = os.umask(new_mask)
    try:
        yield
    finally:
        os.umask(current_mask)

def to_boolean(string, default=None):
    if not string:
        return default

    normalized = string.lower()
    if normalized in ("y", "yes", "1", "true"):
        return True
    elif normalized in ("n", "no", "0", "false"):
        return False
    else:
        raise ValueError("Invalid value for to_boolean: %s" % string)
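
# Illustrative examples: to_boolean() accepts the common yes/no spellings, e.g.
#   to_boolean("Yes") -> True, to_boolean("0") -> False, to_boolean("", True) -> True
#   to_boolean("maybe") raises ValueError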

def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains all the values specified.

    Arguments:
    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().
    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().
    truevalue -- the value to return if checkvalues is a subset of variable.
    falsevalue -- the value to return if variable is empty or if checkvalues is
    not a subset of variable.
    d -- the data store.
    """
    val = d.getVar(variable)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues.issubset(val):
        return truevalue
    return falsevalue

def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    val = d.getVar(variable)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues & val:
        return truevalue
    return falsevalue

def filter(variable, checkvalues, d):
    """Return all words in the variable that are present in the checkvalues.

    Arguments:
    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().
    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().
    d -- the data store.
    """
    val = d.getVar(variable)
    if not val:
        return ''
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    return ' '.join(sorted(checkvalues & val))
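
# Illustrative example (assuming a datastore 'd' where
# d.setVar("DISTRO_FEATURES", "systemd usrmerge wayland") has been called):
#   contains("DISTRO_FEATURES", "systemd wayland", "yes", "no", d) -> "yes"
#   contains_any("DISTRO_FEATURES", "x11 wayland", "yes", "no", d) -> "yes"
#   filter("DISTRO_FEATURES", "x11 wayland", d)                    -> "wayland"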

def get_referenced_vars(start_expr, d):
    """
    :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level
    are ordered arbitrarily)
    """
    seen = set()
    ret = []

    # The first entry in the queue is the unexpanded start expression
    queue = collections.deque([start_expr])
    # Subsequent entries will be variable names, so we need to track whether or not entry requires getVar
    is_first = True

    empty_data = bb.data.init()
    while queue:
        entry = queue.popleft()
        if is_first:
            # Entry is the start expression - no expansion needed
            is_first = False
            expression = entry
        else:
            # This is a variable name - need to get the value
            expression = d.getVar(entry, False)
            ret.append(entry)

        # expandWithRefs is how we actually get the referenced variables in the expression. We call it using an empty
        # data store because we only want the variables directly used in the expression. It returns a set, which is what
        # dooms us to only ever be "quasi-BFS" rather than full BFS.
        new_vars = empty_data.expandWithRefs(expression, None).references - set(seen)

        queue.extend(new_vars)
        seen.update(new_vars)
    return ret

def cpu_count():
    return multiprocessing.cpu_count()

def nonblockingfd(fd):
    fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

def process_profilelog(fn, pout = None):
    # Either call with a list of filenames and set pout or a filename and optionally pout.
    if not pout:
        pout = fn + '.processed'

    with open(pout, 'w') as pout:
        import pstats
        if isinstance(fn, list):
            p = pstats.Stats(*fn, stream=pout)
        else:
            p = pstats.Stats(fn, stream=pout)
        p.sort_stats('time')
        p.print_stats()
        p.print_callers()
        p.sort_stats('cumulative')
        p.print_stats()

        pout.flush()

#
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap

    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)

def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Prepare a small piece of python code which calls the requested function
    # To do this we need to prepare two things - a set of variables we can use to pass
    # the values of arguments into the calling function, and the list of arguments for
    # the function being called
    context = {}
    funcargs = []
    # Handle unnamed arguments
    aidx = 1
    for arg in args:
        argname = 'arg_%s' % aidx
        context[argname] = arg
        funcargs.append(argname)
        aidx += 1
    # Handle keyword arguments
    context.update(kwargs)
    funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.keys()])
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']

def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of _append, _prepend, _remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
        else:
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''
    varlines = []
    newlines = []
    in_var = None
    full_value = ''
    var_end = ''

    def handle_var_end():
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            if isinstance(indent, int):
                if indent == -1:
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)

def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as f:
        (updated, newlines) = edit_metadata(f, variables, varfunc)
    if updated:
        with open(meta_file, 'w') as f:
            f.writelines(newlines)
    return updated
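
# Illustrative usage (hypothetical file name and callback, not part of the
# original module): set a variable to a new value in place. The callback
# receives (varname, origvalue, op, newlines) and returns
# (newvalue, newop, indent, minbreak); returning the original value unchanged
# leaves the file untouched.
#
#   def _set_pr(varname, origvalue, op, newlines):
#       return ("r1", None, 0, True)
#   edit_metadata_file("example.bb", ["PR"], _set_pr)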

def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        bblayercalls.append(op)
        if op == '=':
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)

def get_collection_res(d):
    collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
    collection_res = {}
    for collection in collections:
        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or ''

    return collection_res

def get_file_layer(filename, d, collection_res={}):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
    if not collection_res:
        collection_res = get_collection_res(d)

    def path_to_layer(path):
        # Use longest path so we handle nested layers
        matchlen = 0
        match = None
        for collection, regex in collection_res.items():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES_PRIORITIZED') or '').split()
    bbfilesmatch = False
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatchcase(filename, bbfilesentry):
            bbfilesmatch = True
            result = path_to_layer(bbfilesentry)
            break

    if not bbfilesmatch:
        # Probably a bbclass
        result = path_to_layer(filename)

    return result

# Constant taken from http://linux.die.net/include/linux/prctl.h
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    pass

def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies
    """
    signum = getattr(signal, signame)
    # http://linux.die.net/man/2/prctl
    result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if result != 0:
        raise PrCtlError('prctl failed with error code %s' % result)

#
# Manually call the ioprio syscall. We could depend on other libs like psutil
# however this gets us enough of what we need to bitbake for now without the
# dependency
#
_unamearch = os.uname()[4]
IOPRIO_WHO_PROCESS = 1
IOPRIO_CLASS_SHIFT = 13

def ioprio_set(who, cls, value):
    NR_ioprio_set = None
    if _unamearch == "x86_64":
        NR_ioprio_set = 251
    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
        NR_ioprio_set = 289
    elif _unamearch == "aarch64":
        NR_ioprio_set = 30

    if NR_ioprio_set:
        ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
        rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
        if rc != 0:
            raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
    else:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)

def set_process_name(name):
    from ctypes import cdll, byref, create_string_buffer
    # This is nice to have for debugging, not essential
    try:
        libc = cdll.LoadLibrary('libc.so.6')
        buf = create_string_buffer(bytes(name, 'utf-8'))
        libc.prctl(15, byref(buf), 0, 0, 0)
    except:
        pass

# export common proxies variables from datastore to environment
def export_proxies(d):
    import os

    variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
                 'GIT_PROXY_COMMAND']
    exported = False

    for v in variables:
        if v in os.environ.keys():
            exported = True
        else:
            v_proxy = d.getVar(v)
            if v_proxy is not None:
                os.environ[v] = v_proxy
                exported = True

    return exported

def load_plugins(logger, plugins, pluginpath):
    def load_plugin(name):
        logger.debug(1, 'Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
        if spec:
            return spec.loader.load_module()

    logger.debug(1, 'Loading plugins from %s...' % pluginpath)

    expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
                for ext in python_extensions)
    files = itertools.chain.from_iterable(expanded)
    names = set(os.path.splitext(os.path.basename(fn))[0] for fn in files)
    for name in names:
        if name != '__init__':
            plugin = load_plugin(name)
            if hasattr(plugin, 'plugin_init'):
                obj = plugin.plugin_init(plugins)
                plugins.append(obj or plugin)
            else:
                plugins.append(plugin)

class LogCatcher(logging.Handler):
    """Logging handler for collecting logged messages so you can check them later"""
    def __init__(self):
        self.messages = []
        logging.Handler.__init__(self, logging.WARNING)
    def emit(self, record):
        self.messages.append(bb.build.logformatter.format(record))
    def contains(self, message):
        return (message in self.messages)

def is_semver(version):
    """
    Is the version string following the semver semantic?
    https://semver.org/spec/v2.0.0.html
    """
    regex = re.compile(
        r"""
        ^
        (0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)
        (?:-(
            (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)
            (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*
        ))?
        (?:\+(
            [0-9a-zA-Z-]+
            (?:\.[0-9a-zA-Z-]+)*
        ))?
        $
        """, re.VERBOSE)

    if regex.match(version) is None:
        return False

    return True
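
# Illustrative examples: is_semver() validates against the semver 2.0.0 grammar, e.g.
#   is_semver("1.2.3")              -> True
#   is_semver("1.2.3-rc.1+build.5") -> True
#   is_semver("1.02.3")             -> False (leading zero not allowed)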