utils.py

  1. """
  2. BitBake Utility Functions
  3. """
  4. # Copyright (C) 2004 Michael Lauer
  5. #
  6. # SPDX-License-Identifier: GPL-2.0-only
  7. #
  8. import re, fcntl, os, string, stat, shutil, time
  9. import sys
  10. import errno
  11. import logging
  12. import bb
  13. import bb.msg
  14. import multiprocessing
  15. import fcntl
  16. import importlib
  17. from importlib import machinery
  18. import itertools
  19. import subprocess
  20. import glob
  21. import fnmatch
  22. import traceback
  23. import errno
  24. import signal
  25. import ast
  26. import collections
  27. import copy
  28. from subprocess import getstatusoutput
  29. from contextlib import contextmanager
  30. from ctypes import cdll
  31. logger = logging.getLogger("BitBake.Util")
  32. python_extensions = importlib.machinery.all_suffixes()
  33. def clean_context():
  34. return {
  35. "os": os,
  36. "bb": bb,
  37. "time": time,
  38. }
  39. def get_context():
  40. return _context
  41. def set_context(ctx):
  42. _context = ctx
  43. # Context used in better_exec, eval
  44. _context = clean_context()
  45. class VersionStringException(Exception):
  46. """Exception raised when an invalid version specification is found"""
  47. def explode_version(s):
  48. r = []
  49. alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
  50. numeric_regexp = re.compile(r'^(\d+)(.*)$')
  51. while (s != ''):
  52. if s[0] in string.digits:
  53. m = numeric_regexp.match(s)
  54. r.append((0, int(m.group(1))))
  55. s = m.group(2)
  56. continue
  57. if s[0] in string.ascii_letters:
  58. m = alpha_regexp.match(s)
  59. r.append((1, m.group(1)))
  60. s = m.group(2)
  61. continue
  62. if s[0] == '~':
  63. r.append((-1, s[0]))
  64. else:
  65. r.append((2, s[0]))
  66. s = s[1:]
  67. return r
  68. def split_version(s):
  69. """Split a version string into its constituent parts (PE, PV, PR)"""
  70. s = s.strip(" <>=")
  71. e = 0
  72. if s.count(':'):
  73. e = int(s.split(":")[0])
  74. s = s.split(":")[1]
  75. r = ""
  76. if s.count('-'):
  77. r = s.rsplit("-", 1)[1]
  78. s = s.rsplit("-", 1)[0]
  79. v = s
  80. return (e, v, r)
  81. def vercmp_part(a, b):
  82. va = explode_version(a)
  83. vb = explode_version(b)
  84. while True:
  85. if va == []:
  86. (oa, ca) = (0, None)
  87. else:
  88. (oa, ca) = va.pop(0)
  89. if vb == []:
  90. (ob, cb) = (0, None)
  91. else:
  92. (ob, cb) = vb.pop(0)
  93. if (oa, ca) == (0, None) and (ob, cb) == (0, None):
  94. return 0
  95. if oa < ob:
  96. return -1
  97. elif oa > ob:
  98. return 1
  99. elif ca is None:
  100. return -1
  101. elif cb is None:
  102. return 1
  103. elif ca < cb:
  104. return -1
  105. elif ca > cb:
  106. return 1
  107. def vercmp(ta, tb):
  108. (ea, va, ra) = ta
  109. (eb, vb, rb) = tb
  110. r = int(ea or 0) - int(eb or 0)
  111. if (r == 0):
  112. r = vercmp_part(va, vb)
  113. if (r == 0):
  114. r = vercmp_part(ra, rb)
  115. return r
  116. def vercmp_string(a, b):
  117. ta = split_version(a)
  118. tb = split_version(b)
  119. return vercmp(ta, tb)
  120. def vercmp_string_op(a, b, op):
  121. """
  122. Compare two versions and check if the specified comparison operator matches the result of the comparison.
  123. This function is fairly liberal about what operators it will accept since there are a variety of styles
  124. depending on the context.
  125. """
  126. res = vercmp_string(a, b)
  127. if op in ('=', '=='):
  128. return res == 0
  129. elif op == '<=':
  130. return res <= 0
  131. elif op == '>=':
  132. return res >= 0
  133. elif op in ('>', '>>'):
  134. return res > 0
  135. elif op in ('<', '<<'):
  136. return res < 0
  137. elif op == '!=':
  138. return res != 0
  139. else:
  140. raise VersionStringException('Unsupported comparison operator "%s"' % op)
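
# Illustrative usage of the version helpers above (comments only, based on the
# code as written; values shown are what the functions return):
#
#   split_version("1:2.3-r4")                    ->  (1, '2.3', 'r4')   # (PE, PV, PR)
#   vercmp_string("1.0", "1.1")                  ->  a negative number
#   vercmp_string_op("1.0", "1.1", "<")          ->  True
#   vercmp_string_op("1.0-r1", "1.0-r0", ">=")   ->  True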

def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    r = []
    l = s.split()
    flag = False
    for i in l:
        if i[0] == '(':
            flag = True
            #j = []
        if not flag:
            r.append(i)
        #else:
        #    j.append(i)
        if flag and i.endswith(')'):
            flag = False
            # Ignore version
            #r[-1] += ' ' + ' '.join(j)
    return r

def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.
    """
    r = collections.OrderedDict()
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False
    inversion = False
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
                lastcmp = (i or "")
                i = ""
            i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        #if not inversion:
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r

def explode_dep_versions(s):
    r = explode_dep_versions2(s)
    for d in r:
        if not r[d]:
            r[d] = None
            continue
        if len(r[d]) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
        r[d] = r[d][0]
    return r

def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string
    """
    result = []
    for dep in deps:
        if deps[dep]:
            if isinstance(deps[dep], list):
                for v in deps[dep]:
                    result.append(dep + " (" + v + ")")
            else:
                result.append(dep + " (" + deps[dep] + ")")
        else:
            result.append(dep)
    if commasep:
        return ", ".join(result)
    else:
        return " ".join(result)

def _print_trace(body, line):
    """
    Print the Environment of a Text Body
    """
    error = []
    # print the environment of the method
    min_line = max(1, line-4)
    max_line = min(line + 4, len(body))
    for i in range(min_line, max_line + 1):
        if line == i:
            error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
        else:
            error.append('     %.4d:%s' % (i, body[i-1].rstrip()))
    return error

def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        error.append("Error in compiling python function in %s, line %s:\n" % (realfile, getattr(e, "lineno", "unknown")))
        if hasattr(e, "lineno"):
            error.append("The code lines resulting in this error were:")
            # e.lineno: line's position in realfile
            # lineno: function name's "position -1" in realfile
            # e.lineno - lineno: line's relative position in function
            error.extend(_print_trace(body, e.lineno - lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e

def _print_exception(t, value, tb, realfile, text, context):
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])

            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spawning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        logger.error("\n".join(error))

def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
            logger.error("Exception handler error: %s" % str(e))

        e = bb.BBHandledException(e)
        raise e

def simple_exec(code, context):
    exec(code, get_context(), context)

def better_eval(source, locals, extraglobals = None):
    ctx = get_context()
    if extraglobals:
        ctx = copy.copy(ctx)
        for g in extraglobals:
            ctx[g] = extraglobals[g]
    return eval(source, ctx, locals)

@contextmanager
def fileslocked(files):
    """Context manager for locking and unlocking file locks."""
    locks = []
    if files:
        for lockfile in files:
            locks.append(bb.utils.lockfile(lockfile))

    try:
        yield
    finally:
        for lock in locks:
            bb.utils.unlockfile(lock)
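
# Illustrative usage of fileslocked() (comments only; the lock file path is an
# arbitrary example):
#
#   with bb.utils.fileslocked(["/tmp/example.lock"]):
#       ...  # critical section protected by the lock file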

@contextmanager
def timeout(seconds):
    def timeout_handler(signum, frame):
        pass

    original_handler = signal.signal(signal.SIGALRM, timeout_handler)

    try:
        signal.alarm(seconds)
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, original_handler)

def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES:
                logger.error("Unable to acquire lock '%s', %s",
                             name, e.strerror)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None

def unlockfile(lf):
    """
    Unlock a file locked using lockfile()
    """
    try:
        # If we had a shared lock, we need to promote to exclusive before
        # removing the lockfile. Attempt this, ignore failures.
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()
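
# Illustrative usage of lockfile()/unlockfile() (comments only; the path is an
# arbitrary example):
#
#   lf = bb.utils.lockfile("/tmp/example.lock", shared=False, retry=True)
#   try:
#       ...  # exclusive access to the shared resource
#   finally:
#       bb.utils.unlockfile(lf)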

def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib, mmap

    with open(filename, "rb") as f:
        m = hashlib.md5()
        try:
            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
                for chunk in iter(lambda: mm.read(8192), b''):
                    m.update(chunk)
        except ValueError:
            # You can't mmap() an empty file so silence this exception
            pass
    return m.hexdigest()

def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    import hashlib

    s = hashlib.sha256()
    with open(filename, "rb") as f:
        for line in f:
            s.update(line)
    return s.hexdigest()

def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    import hashlib

    s = hashlib.sha1()
    with open(filename, "rb") as f:
        for line in f:
            s.update(line)
    return s.hexdigest()

def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    return [
        'BB_TASKHASH',
        'HOME',
        'LOGNAME',
        'PATH',
        'PWD',
        'SHELL',
        'TERM',
        'USER',
        'LC_ALL',
        'BBSERVER',
    ]

def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    v = [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_WHITELIST',
        'BB_ENV_EXTRAWHITE',
    ]
    return v + preserved_envvars_exported()

def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.
    """
    removed_vars = {}
    for key in list(os.environ):
        if key in good_vars:
            continue

        removed_vars[key] = os.environ[key]
        del os.environ[key]

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if removed_vars:
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars

def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()
    approved = []
    if 'BB_ENV_WHITELIST' in os.environ:
        approved = os.environ['BB_ENV_WHITELIST'].split()
        approved.extend(['BB_ENV_WHITELIST'])
    else:
        approved = preserved_envvars()
        if 'BB_ENV_EXTRAWHITE' in os.environ:
            approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
            if 'BB_ENV_EXTRAWHITE' not in approved:
                approved.extend(['BB_ENV_EXTRAWHITE'])
    return approved

def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.
    """
    if 'BB_PRESERVE_ENV' not in os.environ:
        good_vars = approved_variables()
        return filter_environment(good_vars)

    return {}

def empty_environment():
    """
    Remove all variables from the environment.
    """
    for s in list(os.environ.keys()):
        os.unsetenv(s)
        del os.environ[s]

def build_environment(d):
    """
    Build an environment from all exported variables.
    """
    import bb.data
    for var in bb.data.keys(d):
        export = d.getVarFlag(var, "export", False)
        if export:
            os.environ[var] = d.getVar(var) or ""

def _check_unsafe_delete_path(path):
    """
    Basic safeguard against recursively deleting something we shouldn't. If it returns True,
    the caller should raise an exception with an appropriate message.
    NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
    with potentially disastrous results.
    """
    extra = ''
    # HOME might not be /home/something, so in case we can get it, check against it
    homedir = os.environ.get('HOME', '')
    if homedir:
        extra = '|%s' % homedir
    if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
        return True
    return False

def remove(path, recurse=False):
    """Equivalent to rm -f or rm -rf"""
    if not path:
        return
    if recurse:
        for name in glob.glob(path):
            if _check_unsafe_delete_path(path):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
        # shutil.rmtree(name) would be ideal but it's too slow
        subprocess.check_call(['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise

def prunedir(topdir):
    # Delete everything reachable from the directory named in 'topdir'.
    # CAUTION: This is dangerous!
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    remove(topdir, recurse=True)

#
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
# but that's possibly insane and suffixes is probably going to be small
#
def prune_suffix(var, suffixes, d):
    # See if var ends with any of the suffixes listed and
    # remove it if found
    for suffix in suffixes:
        if suffix and var.endswith(suffix):
            return var[:-len(suffix)]
    return var
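
# Illustrative usage of prune_suffix() (comments only; the datastore argument is
# not used by the current implementation):
#
#   prune_suffix("glibc-native", ["-native", "-cross"], d)  ->  "glibc"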

def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """
    try:
        os.makedirs(directory)
    except OSError as e:
        if e.errno != errno.EEXIST or not os.path.isdir(directory):
            raise e

def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure. Move is
    atomic.
    """
    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            os.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            if e.errno != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                os.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime

def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/cp
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure

    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime

def break_hardlinks(src, sstat = None):
    """
    Ensures src is the only hardlink to this file. Other hardlinks,
    if any, are not affected (other than in their st_nlink value, of
    course). Returns true on success and false on failure.
    """
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
        return False
    if sstat[stat.ST_NLINK] == 1:
        return True
    return copyfile(src, src, sstat=sstat)

def which(path, item, direction = 0, history = False, executable=False):
    """
    Locate `item` in the list of paths `path` (colon separated string like $PATH).
    If `direction` is non-zero then the list is reversed.
    If `history` is True then the list of candidates also returned as result,history.
    If `executable` is True then the candidate has to be an executable file,
    otherwise the candidate simply has to exist.
    """

    if executable:
        is_candidate = lambda p: os.path.isfile(p) and os.access(p, os.X_OK)
    else:
        is_candidate = lambda p: os.path.exists(p)

    hist = []
    paths = (path or "").split(':')
    if direction != 0:
        paths.reverse()

    for p in paths:
        next = os.path.join(p, item)
        hist.append(next)
        if is_candidate(next):
            if not os.path.isabs(next):
                next = os.path.abspath(next)
            if history:
                return next, hist
            return next

    if history:
        return "", hist
    return ""

def to_boolean(string, default=None):
    if not string:
        return default

    normalized = string.lower()
    if normalized in ("y", "yes", "1", "true"):
        return True
    elif normalized in ("n", "no", "0", "false"):
        return False
    else:
        raise ValueError("Invalid value for to_boolean: %s" % string)
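
# Illustrative usage of to_boolean() (comments only):
#
#   to_boolean("Yes")             ->  True
#   to_boolean("0")               ->  False
#   to_boolean("", default=True)  ->  True
#   to_boolean("maybe")           ->  raises ValueError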

def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains all the values specified.
    Arguments:
    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().
    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().
    truevalue -- the value to return if checkvalues is a subset of variable.
    falsevalue -- the value to return if variable is empty or if checkvalues is
    not a subset of variable.
    d -- the data store.
    """
    val = d.getVar(variable)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues.issubset(val):
        return truevalue
    return falsevalue

def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    val = d.getVar(variable)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues & val:
        return truevalue
    return falsevalue

def filter(variable, checkvalues, d):
    """Return all words in the variable that are present in the checkvalues.
    Arguments:
    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().
    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().
    d -- the data store.
    """
    val = d.getVar(variable)
    if not val:
        return ''
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    return ' '.join(sorted(checkvalues & val))
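
# Illustrative usage of contains()/contains_any()/filter() (comments only),
# assuming a datastore d where DISTRO_FEATURES = "systemd wifi x11":
#
#   contains("DISTRO_FEATURES", "wifi x11", "yes", "no", d)          ->  "yes"
#   contains("DISTRO_FEATURES", "wifi wayland", "yes", "no", d)      ->  "no"
#   contains_any("DISTRO_FEATURES", "wifi wayland", "yes", "no", d)  ->  "yes"
#   filter("DISTRO_FEATURES", "x11 wayland", d)                      ->  "x11"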

def cpu_count():
    return multiprocessing.cpu_count()

def nonblockingfd(fd):
    fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

def process_profilelog(fn, pout = None):
    # Either call with a list of filenames and set pout or a filename and optionally pout.
    if not pout:
        pout = fn + '.processed'
    pout = open(pout, 'w')

    import pstats
    if isinstance(fn, list):
        p = pstats.Stats(*fn, stream=pout)
    else:
        p = pstats.Stats(fn, stream=pout)
    p.sort_stats('time')
    p.print_stats()
    p.print_callers()
    p.sort_stats('cumulative')
    p.print_stats()

    pout.flush()
    pout.close()

#
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap

    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)

def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Prepare a small piece of python code which calls the requested function
    # To do this we need to prepare two things - a set of variables we can use to pass
    # the values of arguments into the calling function, and the list of arguments for
    # the function being called
    context = {}
    funcargs = []
    # Handle unnamed arguments
    aidx = 1
    for arg in args:
        argname = 'arg_%s' % aidx
        context[argname] = arg
        funcargs.append(argname)
        aidx += 1
    # Handle keyword arguments
    context.update(kwargs)
    funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.keys()])
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
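
# Illustrative usage of exec_flat_python_func() (comments only):
#
#   exec_flat_python_func("max", 3, 5)                      ->  5
#   exec_flat_python_func("sorted", [3, 1], reverse=True)   ->  [3, 1]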

def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of _append, _prepend, _remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
        else:
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''
    varlines = []
    newlines = []
    in_var = None
    full_value = ''
    var_end = ''

    def handle_var_end():
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            if isinstance(indent, int):
                if indent == -1:
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)
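
# Illustrative usage of edit_metadata() (comments only): a varfunc callback that
# replaces the value of PV while keeping the original operator and formatting:
#
#   def bump_pv(varname, origvalue, op, newlines):
#       return ('2.0', None, 0, True)
#
#   updated, newlines = edit_metadata(['PV = "1.0"\n', 'PR = "r0"\n'], ['PV'], bump_pv)
#   # updated  ->  True
#   # newlines ->  ['PV = "2.0"\n', 'PR = "r0"\n']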

def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as f:
        (updated, newlines) = edit_metadata(f, variables, varfunc)
    if updated:
        with open(meta_file, 'w') as f:
            f.writelines(newlines)
    return updated

def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        bblayercalls.append(op)
        if op == '=':
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)
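
# Illustrative usage of edit_bblayers_conf() (comments only; the paths are
# arbitrary examples):
#
#   notadded, notremoved = edit_bblayers_conf('conf/bblayers.conf',
#                                             add='/srv/layers/meta-example',
#                                             remove=None)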

def get_file_layer(filename, d):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
    collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
    collection_res = {}
    for collection in collections:
        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or ''

    def path_to_layer(path):
        # Use longest path so we handle nested layers
        matchlen = 0
        match = None
        for collection, regex in collection_res.items():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES') or '').split()
    bbfilesmatch = False
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatch(filename, bbfilesentry):
            bbfilesmatch = True
            result = path_to_layer(bbfilesentry)

    if not bbfilesmatch:
        # Probably a bbclass
        result = path_to_layer(filename)

    return result

# Constant taken from http://linux.die.net/include/linux/prctl.h
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    pass

def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies
    """
    signum = getattr(signal, signame)
    # http://linux.die.net/man/2/prctl
    result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if result != 0:
        raise PrCtlError('prctl failed with error code %s' % result)

#
# Manually call the ioprio syscall. We could depend on other libs like psutil
# however this gets us enough of what we need to bitbake for now without the
# dependency
#
_unamearch = os.uname()[4]
IOPRIO_WHO_PROCESS = 1
IOPRIO_CLASS_SHIFT = 13

def ioprio_set(who, cls, value):
    NR_ioprio_set = None
    if _unamearch == "x86_64":
        NR_ioprio_set = 251
    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
        NR_ioprio_set = 289
    elif _unamearch == "aarch64":
        NR_ioprio_set = 30

    if NR_ioprio_set:
        ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
        rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
        if rc != 0:
            raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
    else:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)

def set_process_name(name):
    from ctypes import cdll, byref, create_string_buffer
    # This is nice to have for debugging, not essential
    try:
        libc = cdll.LoadLibrary('libc.so.6')
        buf = create_string_buffer(bytes(name, 'utf-8'))
        libc.prctl(15, byref(buf), 0, 0, 0)
    except:
        pass

# export common proxies variables from datastore to environment
def export_proxies(d):
    import os

    variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
                 'GIT_PROXY_COMMAND']
    exported = False

    for v in variables:
        if v in os.environ.keys():
            exported = True
        else:
            v_proxy = d.getVar(v)
            if v_proxy is not None:
                os.environ[v] = v_proxy
                exported = True

    return exported

def load_plugins(logger, plugins, pluginpath):
    def load_plugin(name):
        logger.debug(1, 'Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            return spec.loader.load_module()

    logger.debug(1, 'Loading plugins from %s...' % pluginpath)

    expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
                for ext in python_extensions)
    files = itertools.chain.from_iterable(expanded)
    names = set(os.path.splitext(os.path.basename(fn))[0] for fn in files)
    for name in names:
        if name != '__init__':
            plugin = load_plugin(name)
            if hasattr(plugin, 'plugin_init'):
                obj = plugin.plugin_init(plugins)
                plugins.append(obj or plugin)
            else:
                plugins.append(plugin)

class LogCatcher(logging.Handler):
    """Logging handler for collecting logged messages so you can check them later"""
    def __init__(self):
        self.messages = []
        logging.Handler.__init__(self, logging.WARNING)
    def emit(self, record):
        self.messages.append(bb.build.logformatter.format(record))
    def contains(self, message):
        return (message in self.messages)