utils.py

# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake Utility Functions
"""

# Copyright (C) 2004 Michael Lauer
#
# SPDX-License-Identifier: GPL-2.0-only
#

import re, fcntl, os, string, stat, shutil, time
import sys
import errno
import logging
import bb
import bb.msg
import multiprocessing
import fcntl
import importlib
from importlib import machinery
import itertools
import subprocess
import glob
import fnmatch
import traceback
import errno
import signal
import ast
import collections
import copy
from subprocess import getstatusoutput
from contextlib import contextmanager
from ctypes import cdll

logger = logging.getLogger("BitBake.Util")
python_extensions = importlib.machinery.all_suffixes()


def clean_context():
    return {
        "os": os,
        "bb": bb,
        "time": time,
    }

def get_context():
    return _context

def set_context(ctx):
    _context = ctx

# Context used in better_exec, eval
_context = clean_context()

class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found"""

def explode_version(s):
    r = []
    alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
    numeric_regexp = re.compile(r'^(\d+)(.*)$')
    while (s != ''):
        if s[0] in string.digits:
            m = numeric_regexp.match(s)
            r.append((0, int(m.group(1))))
            s = m.group(2)
            continue
        if s[0] in string.ascii_letters:
            m = alpha_regexp.match(s)
            r.append((1, m.group(1)))
            s = m.group(2)
            continue
        if s[0] == '~':
            r.append((-1, s[0]))
        else:
            r.append((2, s[0]))
        s = s[1:]
    return r

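# Example (illustrative): explode_version() turns a version string into a list of
# (weight, fragment) tuples that vercmp_part() can compare element by element, e.g.
#   explode_version("1.0rc2")  ->  [(0, 1), (2, '.'), (0, 0), (1, 'rc'), (0, 2)]
# The -1 weight given to '~' makes "1.0~rc2" sort before "1.0".
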
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR)"""
    s = s.strip(" <>=")
    e = 0
    if s.count(':'):
        e = int(s.split(":")[0])
        s = s.split(":")[1]
    r = ""
    if s.count('-'):
        r = s.rsplit("-", 1)[1]
        s = s.rsplit("-", 1)[0]
    v = s
    return (e, v, r)

def vercmp_part(a, b):
    va = explode_version(a)
    vb = explode_version(b)
    while True:
        if va == []:
            (oa, ca) = (0, None)
        else:
            (oa, ca) = va.pop(0)
        if vb == []:
            (ob, cb) = (0, None)
        else:
            (ob, cb) = vb.pop(0)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            return 0
        if oa < ob:
            return -1
        elif oa > ob:
            return 1
        elif ca is None:
            return -1
        elif cb is None:
            return 1
        elif ca < cb:
            return -1
        elif ca > cb:
            return 1

def vercmp(ta, tb):
    (ea, va, ra) = ta
    (eb, vb, rb) = tb

    r = int(ea or 0) - int(eb or 0)
    if (r == 0):
        r = vercmp_part(va, vb)
    if (r == 0):
        r = vercmp_part(ra, rb)
    return r

def vercmp_string(a, b):
    ta = split_version(a)
    tb = split_version(b)
    return vercmp(ta, tb)

def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    if op in ('=', '=='):
        return res == 0
    elif op == '<=':
        return res <= 0
    elif op == '>=':
        return res >= 0
    elif op in ('>', '>>'):
        return res > 0
    elif op in ('<', '<<'):
        return res < 0
    elif op == '!=':
        return res != 0
    else:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)

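# Example (illustrative): the comparison works on the (epoch, version, revision)
# tuples produced by split_version(), so:
#   vercmp_string("1.0", "1.1")                    ->  -1   (1.0 is older)
#   vercmp_string_op("1.0", "1.1", "<")            ->  True
#   vercmp_string_op("2:0.5-r1", "1:1.0-r0", ">")  ->  True  (the higher epoch wins)
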
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    r = []
    l = s.split()
    flag = False
    for i in l:
        if i[0] == '(':
            flag = True
            #j = []
        if not flag:
            r.append(i)
        #else:
        #    j.append(i)
        if flag and i.endswith(')'):
            flag = False
            # Ignore version
            #r[-1] += ' ' + ' '.join(j)
    return r

def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.
    """
    r = collections.OrderedDict()
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False
    inversion = False
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
                lastcmp = (i or "")
                i = ""
            i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        #if not inversion:
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r

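# Example (illustrative):
#   explode_dep_versions2("foo (>= 1.0) bar, baz")
# returns an OrderedDict equivalent to
#   {'bar': [], 'baz': [], 'foo': ['>= 1.0']}
# (keys are sorted because sort=True by default; dependencies without a version
# constraint get an empty list).
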
def explode_dep_versions(s):
    r = explode_dep_versions2(s)
    for d in r:
        if not r[d]:
            r[d] = None
            continue
        if len(r[d]) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
        r[d] = r[d][0]
    return r

def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string
    """
    result = []
    for dep in deps:
        if deps[dep]:
            if isinstance(deps[dep], list):
                for v in deps[dep]:
                    result.append(dep + " (" + v + ")")
            else:
                result.append(dep + " (" + deps[dep] + ")")
        else:
            result.append(dep)
    if commasep:
        return ", ".join(result)
    else:
        return " ".join(result)

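# Example (illustrative): join_deps() flattens the exploded form back into a
# dependency string:
#   join_deps({'foo': ['>= 1.0'], 'bar': []})                  ->  "foo (>= 1.0), bar"
#   join_deps({'foo': ['>= 1.0'], 'bar': []}, commasep=False)  ->  "foo (>= 1.0) bar"
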
def _print_trace(body, line):
    """
    Print the Environment of a Text Body
    """
    error = []
    # print the environment of the method
    min_line = max(1, line-4)
    max_line = min(line + 4, len(body))
    for i in range(min_line, max_line + 1):
        if line == i:
            error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
        else:
            error.append('     %.4d:%s' % (i, body[i-1].rstrip()))
    return error

def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        error.append("Error in compiling python function in %s, line %s:\n" % (realfile, e.lineno))
        if hasattr(e, "lineno"):
            error.append("The code lines resulting in this error were:")
            # e.lineno: line's position in realfile
            # lineno: function name's "position -1" in realfile
            # e.lineno - lineno: line's relative position in function
            error.extend(_print_trace(body, e.lineno - lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))
        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e

def _print_exception(t, value, tb, realfile, text, context):
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])

            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spawning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        logger.error("\n".join(error))

def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e2:
            logger.error("Exception handler error: %s" % str(e2))

        e = bb.BBHandledException(e)
        raise e

def simple_exec(code, context):
    exec(code, get_context(), context)

def better_eval(source, locals, extraglobals = None):
    ctx = get_context()
    if extraglobals:
        ctx = copy.copy(ctx)
        for g in extraglobals:
            ctx[g] = extraglobals[g]
    return eval(source, ctx, locals)

@contextmanager
def fileslocked(files):
    """Context manager for locking and unlocking file locks."""
    locks = []
    if files:
        for lockfile in files:
            locks.append(bb.utils.lockfile(lockfile))

    yield

    for lock in locks:
        bb.utils.unlockfile(lock)

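# Example (illustrative; the path is made up): fileslocked() serialises access to a
# shared resource by holding one or more lock files for the duration of a with-block:
#
#   with bb.utils.fileslocked(["/tmp/dl.lock"]):
#       ...  # only one process at a time runs this block
#
# lockfile()/unlockfile() below provide the underlying non-context-manager API.
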
@contextmanager
def timeout(seconds):
    def timeout_handler(signum, frame):
        pass

    original_handler = signal.signal(signal.SIGALRM, timeout_handler)

    try:
        signal.alarm(seconds)
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, original_handler)

def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES:
                logger.error("Unable to acquire lock '%s', %s",
                             e.strerror, name)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None

def unlockfile(lf):
    """
    Unlock a file locked using lockfile()
    """
    try:
        # If we had a shared lock, we need to promote to exclusive before
        # removing the lockfile. Attempt this, ignore failures.
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()

def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib, mmap

    with open(filename, "rb") as f:
        m = hashlib.md5()
        try:
            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
                for chunk in iter(lambda: mm.read(8192), b''):
                    m.update(chunk)
        except ValueError:
            # You can't mmap() an empty file so silence this exception
            pass
    return m.hexdigest()

def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    import hashlib

    s = hashlib.sha256()
    with open(filename, "rb") as f:
        for line in f:
            s.update(line)
    return s.hexdigest()

def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    import hashlib

    s = hashlib.sha1()
    with open(filename, "rb") as f:
        for line in f:
            s.update(line)
    return s.hexdigest()

def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    return [
        'BB_TASKHASH',
        'HOME',
        'LOGNAME',
        'PATH',
        'PWD',
        'SHELL',
        'TERM',
        'USER',
        'LC_ALL',
        'BBSERVER',
    ]

def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    v = [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_WHITELIST',
        'BB_ENV_EXTRAWHITE',
    ]
    return v + preserved_envvars_exported()

def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.
    """
    removed_vars = {}
    for key in list(os.environ):
        if key in good_vars:
            continue

        removed_vars[key] = os.environ[key]
        del os.environ[key]

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if removed_vars:
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars

def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()
    approved = []
    if 'BB_ENV_WHITELIST' in os.environ:
        approved = os.environ['BB_ENV_WHITELIST'].split()
        approved.extend(['BB_ENV_WHITELIST'])
    else:
        approved = preserved_envvars()
        if 'BB_ENV_EXTRAWHITE' in os.environ:
            approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
            if 'BB_ENV_EXTRAWHITE' not in approved:
                approved.extend(['BB_ENV_EXTRAWHITE'])
    return approved

def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.
    """
    if 'BB_PRESERVE_ENV' not in os.environ:
        good_vars = approved_variables()
        return filter_environment(good_vars)

    return {}

def empty_environment():
    """
    Remove all variables from the environment.
    """
    for s in list(os.environ.keys()):
        os.unsetenv(s)
        del os.environ[s]

def build_environment(d):
    """
    Build an environment from all exported variables.
    """
    import bb.data
    for var in bb.data.keys(d):
        export = d.getVarFlag(var, "export", False)
        if export:
            os.environ[var] = d.getVar(var) or ""

def _check_unsafe_delete_path(path):
    """
    Basic safeguard against recursively deleting something we shouldn't. If it returns True,
    the caller should raise an exception with an appropriate message.
    NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
    with potentially disastrous results.
    """
    extra = ''
    # HOME might not be /home/something, so in case we can get it, check against it
    homedir = os.environ.get('HOME', '')
    if homedir:
        extra = '|%s' % homedir
    if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
        return True
    return False

def remove(path, recurse=False):
    """Equivalent to rm -f or rm -rf"""
    if not path:
        return
    if recurse:
        for name in glob.glob(path):
            if _check_unsafe_delete_path(path):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
        # shutil.rmtree(name) would be ideal but it's too slow
        subprocess.check_call(['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise

def prunedir(topdir):
    # Delete everything reachable from the directory named in 'topdir'.
    # CAUTION: This is dangerous!
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    remove(topdir, recurse=True)

#
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
# but that's possibly insane and suffixes is probably going to be small
#
def prune_suffix(var, suffixes, d):
    # See if var ends with any of the suffixes listed and
    # remove it if found
    for suffix in suffixes:
        if suffix and var.endswith(suffix):
            return var[:-len(suffix)]
    return var

def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """
    try:
        os.makedirs(directory)
    except OSError as e:
        if e.errno != errno.EEXIST or not os.path.isdir(directory):
            raise e

def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure. Move is
    atomic.
    """
    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0
    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            os.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            if e.errno != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                os.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime

def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0
    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
    else:
        #we don't yet handle special, so we need to fall back to /bin/cp
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime

def break_hardlinks(src, sstat = None):
    """
    Ensures src is the only hardlink to this file. Other hardlinks,
    if any, are not affected (other than in their st_nlink value, of
    course). Returns true on success and false on failure.
    """
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
        return False
    if sstat[stat.ST_NLINK] == 1:
        return True
    return copyfile(src, src, sstat=sstat)

def which(path, item, direction = 0, history = False, executable=False):
    """
    Locate `item` in the list of paths `path` (colon separated string like $PATH).
    If `direction` is non-zero then the list is reversed.
    If `history` is True then the list of candidates also returned as result,history.
    If `executable` is True then the candidate has to be an executable file,
    otherwise the candidate simply has to exist.
    """
    if executable:
        is_candidate = lambda p: os.path.isfile(p) and os.access(p, os.X_OK)
    else:
        is_candidate = lambda p: os.path.exists(p)

    hist = []
    paths = (path or "").split(':')
    if direction != 0:
        paths.reverse()

    for p in paths:
        next = os.path.join(p, item)
        hist.append(next)
        if is_candidate(next):
            if not os.path.isabs(next):
                next = os.path.abspath(next)
            if history:
                return next, hist
            return next

    if history:
        return "", hist
    return ""

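# Example (illustrative; results depend on the host filesystem): search a PATH-style
# string for a file, optionally requiring it to be executable:
#   which("/usr/sbin:/usr/bin:/bin", "sh", executable=True)  ->  e.g. "/bin/sh"
#   which("/usr/sbin:/usr/bin:/bin", "nosuchtool")           ->  ""
#   which("/usr/sbin:/usr/bin:/bin", "sh", history=True)     ->  (path, list of candidates tried)
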
def to_boolean(string, default=None):
    if not string:
        return default

    normalized = string.lower()
    if normalized in ("y", "yes", "1", "true"):
        return True
    elif normalized in ("n", "no", "0", "false"):
        return False
    else:
        raise ValueError("Invalid value for to_boolean: %s" % string)

def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains all the values specified.
    Arguments:
    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().
    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().
    truevalue -- the value to return if checkvalues is a subset of variable.
    falsevalue -- the value to return if variable is empty or if checkvalues is
    not a subset of variable.
    d -- the data store.
    """
    val = d.getVar(variable)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues.issubset(val):
        return truevalue
    return falsevalue

def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    val = d.getVar(variable)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues & val:
        return truevalue
    return falsevalue

def filter(variable, checkvalues, d):
    """Return all words in the variable that are present in the checkvalues.
    Arguments:
    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().
    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().
    d -- the data store.
    """
    val = d.getVar(variable)
    if not val:
        return ''
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    return ' '.join(sorted(checkvalues & val))

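# Example (illustrative): given a datastore d where DISTRO_FEATURES = "systemd wayland x11",
#   contains("DISTRO_FEATURES", "systemd", "yes", "no", d)                 ->  "yes"
#   contains("DISTRO_FEATURES", "systemd pulseaudio", "yes", "no", d)      ->  "no"   (needs all values)
#   contains_any("DISTRO_FEATURES", "systemd pulseaudio", "yes", "no", d)  ->  "yes"
#   filter("DISTRO_FEATURES", "systemd pulseaudio", d)                     ->  "systemd"
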
def cpu_count():
    return multiprocessing.cpu_count()

def nonblockingfd(fd):
    fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

def process_profilelog(fn, pout = None):
    # Either call with a list of filenames and set pout or a filename and optionally pout.
    if not pout:
        pout = fn + '.processed'
    pout = open(pout, 'w')

    import pstats
    if isinstance(fn, list):
        p = pstats.Stats(*fn, stream=pout)
    else:
        p = pstats.Stats(fn, stream=pout)
    p.sort_stats('time')
    p.print_stats()
    p.print_callers()
    p.sort_stats('cumulative')
    p.print_stats()

    pout.flush()
    pout.close()

#
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap

    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)

def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Prepare a small piece of python code which calls the requested function
    # To do this we need to prepare two things - a set of variables we can use to pass
    # the values of arguments into the calling function, and the list of arguments for
    # the function being called
    context = {}
    funcargs = []
    # Handle unnamed arguments
    aidx = 1
    for arg in args:
        argname = 'arg_%s' % aidx
        context[argname] = arg
        funcargs.append(argname)
        aidx += 1
    # Handle keyword arguments
    context.update(kwargs)
    funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.keys()])
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']

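# Example (illustrative): the function name is passed as a string and the arguments are
# marshalled through a generated "retval = ..." statement, so calls such as
#   exec_flat_python_func('min', 3, 7)                     ->  3
#   exec_flat_python_func('sorted', [2, 1], reverse=True)  ->  [2, 1]
# work for any callable resolvable in the execution context.
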
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of _append, _prepend, _remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
        else:
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''
    varlines = []
    newlines = []
    in_var = None
    full_value = ''
    var_end = ''

    def handle_var_end():
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            if isinstance(indent, int):
                if indent == -1:
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None

                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)

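# Example (illustrative; the variable name and value are made up): a varfunc callback
# that rewrites DESCRIPTION and leaves everything else untouched:
#
#   def patch_desc(varname, origvalue, op, newlines):
#       if varname == 'DESCRIPTION':
#           return ('A better description', None, 0, True)
#       return (origvalue, None, 0, True)
#
#   updated, newlines = edit_metadata(lines, ['DESCRIPTION'], patch_desc)
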
def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as f:
        (updated, newlines) = edit_metadata(f, variables, varfunc)
    if updated:
        with open(meta_file, 'w') as f:
            f.writelines(newlines)
    return updated

def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        bblayercalls.append(op)
        if op == '=':
            del orig_bblayers[:]
            orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)

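# Example (illustrative; the paths are made up):
#   notadded, notremoved = edit_bblayers_conf('conf/bblayers.conf',
#                                             add='/srv/layers/meta-custom',
#                                             remove=None)
# adds the layer unless it is already listed and reports anything it could not change.
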
def get_file_layer(filename, d):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
    collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
    collection_res = {}
    for collection in collections:
        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or ''

    def path_to_layer(path):
        # Use longest path so we handle nested layers
        matchlen = 0
        match = None
        for collection, regex in collection_res.items():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES') or '').split()
    bbfilesmatch = False
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatch(filename, bbfilesentry):
            bbfilesmatch = True
            result = path_to_layer(bbfilesentry)

    if not bbfilesmatch:
        # Probably a bbclass
        result = path_to_layer(filename)

    return result

# Constant taken from http://linux.die.net/include/linux/prctl.h
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    pass

def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies
    """
    signum = getattr(signal, signame)
    # http://linux.die.net/man/2/prctl
    result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if result != 0:
        raise PrCtlError('prctl failed with error code %s' % result)

#
# Manually call the ioprio syscall. We could depend on other libs like psutil
# however this gets us enough of what we need for bitbake for now without the
# dependency
#
_unamearch = os.uname()[4]
IOPRIO_WHO_PROCESS = 1
IOPRIO_CLASS_SHIFT = 13

def ioprio_set(who, cls, value):
    NR_ioprio_set = None
    if _unamearch == "x86_64":
        NR_ioprio_set = 251
    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
        NR_ioprio_set = 289
    elif _unamearch == "aarch64":
        NR_ioprio_set = 30

    if NR_ioprio_set:
        ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
        rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
        if rc != 0:
            raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
    else:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)

def set_process_name(name):
    from ctypes import cdll, byref, create_string_buffer
    # This is nice to have for debugging, not essential
    try:
        libc = cdll.LoadLibrary('libc.so.6')
        buf = create_string_buffer(bytes(name, 'utf-8'))
        libc.prctl(15, byref(buf), 0, 0, 0)
    except:
        pass

# export common proxy variables from the datastore to the environment
def export_proxies(d):
    import os

    variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
                 'GIT_PROXY_COMMAND']
    exported = False

    for v in variables:
        if v in os.environ.keys():
            exported = True
        else:
            v_proxy = d.getVar(v)
            if v_proxy is not None:
                os.environ[v] = v_proxy
                exported = True

    return exported

def load_plugins(logger, plugins, pluginpath):
    def load_plugin(name):
        logger.debug(1, 'Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            return spec.loader.load_module()

    logger.debug(1, 'Loading plugins from %s...' % pluginpath)

    expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
                for ext in python_extensions)
    files = itertools.chain.from_iterable(expanded)
    names = set(os.path.splitext(os.path.basename(fn))[0] for fn in files)
    for name in names:
        if name != '__init__':
            plugin = load_plugin(name)
            if hasattr(plugin, 'plugin_init'):
                obj = plugin.plugin_init(plugins)
                plugins.append(obj or plugin)
            else:
                plugins.append(plugin)

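# Example (illustrative; the plugin directory is made up):
#   plugins = []
#   load_plugins(logger, plugins, '/srv/layers/meta-custom/lib/plugins')
# Each loaded module (or the object returned by its plugin_init(), if it defines one)
# ends up appended to the 'plugins' list.
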
class LogCatcher(logging.Handler):
    """Logging handler for collecting logged messages so you can check them later"""
    def __init__(self):
        self.messages = []
        logging.Handler.__init__(self, logging.WARNING)
    def emit(self, record):
        self.messages.append(bb.build.logformatter.format(record))
    def contains(self, message):
        return (message in self.messages)