utils.py

# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake Utility Functions
"""

# Copyright (C) 2004 Michael Lauer
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import re, fcntl, os, string, stat, shutil, time
import sys
import errno
import logging
import bb
import bb.msg
import multiprocessing
import importlib
from importlib import machinery
import itertools
import subprocess
import glob
import fnmatch
import traceback
import signal
import ast
import collections
import copy
from subprocess import getstatusoutput
from contextlib import contextmanager
from ctypes import cdll

logger = logging.getLogger("BitBake.Util")
python_extensions = importlib.machinery.all_suffixes()
def clean_context():
    return {
        "os": os,
        "bb": bb,
        "time": time,
    }

def get_context():
    return _context

def set_context(ctx):
    _context = ctx

# Context used in better_exec, eval
_context = clean_context()

class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found"""

def explode_version(s):
    r = []
    alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
    numeric_regexp = re.compile(r'^(\d+)(.*)$')
    while (s != ''):
        if s[0] in string.digits:
            m = numeric_regexp.match(s)
            r.append((0, int(m.group(1))))
            s = m.group(2)
            continue
        if s[0] in string.ascii_letters:
            m = alpha_regexp.match(s)
            r.append((1, m.group(1)))
            s = m.group(2)
            continue
        if s[0] == '~':
            r.append((-1, s[0]))
        else:
            r.append((2, s[0]))
        s = s[1:]
    return r

def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR)"""
    s = s.strip(" <>=")
    e = 0
    if s.count(':'):
        e = int(s.split(":")[0])
        s = s.split(":")[1]
    r = ""
    if s.count('-'):
        r = s.rsplit("-", 1)[1]
        s = s.rsplit("-", 1)[0]
    v = s
    return (e, v, r)

def vercmp_part(a, b):
    va = explode_version(a)
    vb = explode_version(b)
    while True:
        if va == []:
            (oa, ca) = (0, None)
        else:
            (oa, ca) = va.pop(0)
        if vb == []:
            (ob, cb) = (0, None)
        else:
            (ob, cb) = vb.pop(0)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            return 0
        if oa < ob:
            return -1
        elif oa > ob:
            return 1
        elif ca is None:
            return -1
        elif cb is None:
            return 1
        elif ca < cb:
            return -1
        elif ca > cb:
            return 1

def vercmp(ta, tb):
    (ea, va, ra) = ta
    (eb, vb, rb) = tb

    r = int(ea or 0) - int(eb or 0)
    if (r == 0):
        r = vercmp_part(va, vb)
    if (r == 0):
        r = vercmp_part(ra, rb)
    return r

def vercmp_string(a, b):
    ta = split_version(a)
    tb = split_version(b)
    return vercmp(ta, tb)

def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    if op in ('=', '=='):
        return res == 0
    elif op == '<=':
        return res <= 0
    elif op == '>=':
        return res >= 0
    elif op in ('>', '>>'):
        return res > 0
    elif op in ('<', '<<'):
        return res < 0
    elif op == '!=':
        return res != 0
    else:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)
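# Example usage (illustrative only, not part of the original module; assumes
# version strings in the epoch:version-revision form that split_version() expects):
#
#   >>> vercmp_string("1.2.3", "1.2.10")
#   -1
#   >>> vercmp_string_op("2:1.0-r0", "1:9.9-r5", ">")
#   True
#
# vercmp_string() returns a negative, zero or positive number in the usual
# cmp() convention; vercmp_string_op() wraps it for the operator styles used
# in deb/opkg/rpm-style dependency strings.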
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    r = []
    l = s.split()
    flag = False
    for i in l:
        if i[0] == '(':
            flag = True
            #j = []
        if not flag:
            r.append(i)
        #else:
        #    j.append(i)
        if flag and i.endswith(')'):
            flag = False
            # Ignore version
            #r[-1] += ' ' + ' '.join(j)
    return r

def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.
    """
    r = collections.OrderedDict()
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False
    inversion = False
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
                lastcmp = (i or "")
                i = ""
            i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        #if not inversion:
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r

def explode_dep_versions(s):
    r = explode_dep_versions2(s)
    for d in r:
        if not r[d]:
            r[d] = None
            continue
        if len(r[d]) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
        r[d] = r[d][0]
    return r

def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string
    """
    result = []
    for dep in deps:
        if deps[dep]:
            if isinstance(deps[dep], list):
                for v in deps[dep]:
                    result.append(dep + " (" + v + ")")
            else:
                result.append(dep + " (" + deps[dep] + ")")
        else:
            result.append(dep)
    if commasep:
        return ", ".join(result)
    else:
        return " ".join(result)
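# Example round trip (illustrative only, not part of the original module):
#
#   >>> deps = explode_dep_versions2("foo (>= 1.2) bar")
#   >>> dict(deps)
#   {'bar': [], 'foo': ['>= 1.2']}
#   >>> join_deps(deps)
#   'bar, foo (>= 1.2)'
#
# explode_dep_versions2() keys the OrderedDict by dependency name (sorted by
# default) and join_deps() turns such a mapping back into an RDEPENDS-style
# string.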
def _print_trace(body, line):
    """
    Print the Environment of a Text Body
    """
    error = []
    # print the environment of the method
    min_line = max(1, line-4)
    max_line = min(line + 4, len(body))
    for i in range(min_line, max_line + 1):
        if line == i:
            error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
        else:
            error.append('     %.4d:%s' % (i, body[i-1].rstrip()))
    return error

def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        error.append("Error in compiling python function in %s, line %s:\n" % (realfile, e.lineno))
        if hasattr(e, "lineno"):
            error.append("The code lines resulting in this error were:")
            # e.lineno: line's position in realfile
            # lineno: function name's "position -1" in realfile
            # e.lineno - lineno: line's relative position in function
            error.extend(_print_trace(body, e.lineno - lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e

def _print_exception(t, value, tb, realfile, text, context):
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])

            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spawning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        logger.error("\n".join(error))

def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
            logger.error("Exception handler error: %s" % str(e))

        e = bb.BBHandledException(e)
        raise e

def simple_exec(code, context):
    exec(code, get_context(), context)

def better_eval(source, locals, extraglobals = None):
    ctx = get_context()
    if extraglobals:
        ctx = copy.copy(ctx)
        for g in extraglobals:
            ctx[g] = extraglobals[g]
    return eval(source, ctx, locals)

@contextmanager
def fileslocked(files):
    """Context manager for locking and unlocking file locks."""
    locks = []
    if files:
        for lockfile in files:
            locks.append(bb.utils.lockfile(lockfile))

    yield

    for lock in locks:
        bb.utils.unlockfile(lock)

@contextmanager
def timeout(seconds):
    def timeout_handler(signum, frame):
        pass

    original_handler = signal.signal(signal.SIGALRM, timeout_handler)

    try:
        signal.alarm(seconds)
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, original_handler)
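# Example usage of the two context managers above (illustrative only, not part
# of the original module; '/tmp/example.lock' is a hypothetical path):
#
#   with bb.utils.fileslocked(["/tmp/example.lock"]):
#       pass  # critical section protected by the lock file
#
#   with bb.utils.timeout(10):
#       pass  # SIGALRM fires after 10 seconds if this is still running
#
# Note that timeout() only installs a SIGALRM handler, so it is only effective
# in the main thread of the process.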
def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES:
                logger.error("Unable to acquire lock '%s', %s",
                             e.strerror, name)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None

def unlockfile(lf):
    """
    Unlock a file locked using lockfile()
    """
    try:
        # If we had a shared lock, we need to promote to exclusive before
        # removing the lockfile. Attempt this, ignore failures.
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()
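# Example usage (illustrative only, not part of the original module;
# '/tmp/example.lock' is a hypothetical path):
#
#   lock = bb.utils.lockfile("/tmp/example.lock", shared=False, retry=True)
#   try:
#       pass  # do work that must not run concurrently
#   finally:
#       bb.utils.unlockfile(lock)
#
# With retry=False and block=False the call is non-blocking and may return
# None if the lock is already held, so callers should check the return value.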
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib, mmap

    with open(filename, "rb") as f:
        m = hashlib.md5()
        try:
            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
                for chunk in iter(lambda: mm.read(8192), b''):
                    m.update(chunk)
        except ValueError:
            # You can't mmap() an empty file so silence this exception
            pass
    return m.hexdigest()

def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    import hashlib

    s = hashlib.sha256()
    with open(filename, "rb") as f:
        for line in f:
            s.update(line)
    return s.hexdigest()

def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    import hashlib

    s = hashlib.sha1()
    with open(filename, "rb") as f:
        for line in f:
            s.update(line)
    return s.hexdigest()

def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    return [
        'BB_TASKHASH',
        'HOME',
        'LOGNAME',
        'PATH',
        'PWD',
        'SHELL',
        'TERM',
        'USER',
        'LC_ALL',
        'BBSERVER',
    ]

def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    v = [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_WHITELIST',
        'BB_ENV_EXTRAWHITE',
    ]
    return v + preserved_envvars_exported()

def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.
    """
    removed_vars = {}
    for key in list(os.environ):
        if key in good_vars:
            continue

        removed_vars[key] = os.environ[key]
        del os.environ[key]

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if removed_vars:
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars

def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()
    approved = []
    if 'BB_ENV_WHITELIST' in os.environ:
        approved = os.environ['BB_ENV_WHITELIST'].split()
        approved.extend(['BB_ENV_WHITELIST'])
    else:
        approved = preserved_envvars()
        if 'BB_ENV_EXTRAWHITE' in os.environ:
            approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
            if 'BB_ENV_EXTRAWHITE' not in approved:
                approved.extend(['BB_ENV_EXTRAWHITE'])
    return approved

def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.
    """
    if 'BB_PRESERVE_ENV' not in os.environ:
        good_vars = approved_variables()
        return filter_environment(good_vars)

    return {}

def empty_environment():
    """
    Remove all variables from the environment.
    """
    for s in list(os.environ.keys()):
        os.unsetenv(s)
        del os.environ[s]

def build_environment(d):
    """
    Build an environment from all exported variables.
    """
    import bb.data
    for var in bb.data.keys(d):
        export = d.getVarFlag(var, "export", False)
        if export:
            os.environ[var] = d.getVar(var) or ""

def _check_unsafe_delete_path(path):
    """
    Basic safeguard against recursively deleting something we shouldn't. If it returns True,
    the caller should raise an exception with an appropriate message.
    NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
    with potentially disastrous results.
    """
    extra = ''
    # HOME might not be /home/something, so in case we can get it, check against it
    homedir = os.environ.get('HOME', '')
    if homedir:
        extra = '|%s' % homedir
    if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
        return True
    return False

def remove(path, recurse=False):
    """Equivalent to rm -f or rm -rf"""
    if not path:
        return
    if recurse:
        for name in glob.glob(path):
            if _check_unsafe_delete_path(path):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
        # shutil.rmtree(name) would be ideal but it's too slow
        subprocess.check_call(['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise

def prunedir(topdir):
    # Delete everything reachable from the directory named in 'topdir'.
    # CAUTION: This is dangerous!
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    remove(topdir, recurse=True)

#
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
# but that's possibly insane and suffixes is probably going to be small
#
def prune_suffix(var, suffixes, d):
    # See if var ends with any of the suffixes listed and
    # remove it if found
    for suffix in suffixes:
        if suffix and var.endswith(suffix):
            return var[:-len(suffix)]
    return var
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """
    try:
        os.makedirs(directory)
    except OSError as e:
        if e.errno != errno.EEXIST or not os.path.isdir(directory):
            raise e

def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure. Move is
    atomic.
    """
    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            os.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            if e.errno != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                os.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime

def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when copying across
    filesystems. Returns true on success and false on failure.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure

    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime

def break_hardlinks(src, sstat = None):
    """
    Ensures src is the only hardlink to this file. Other hardlinks,
    if any, are not affected (other than in their st_nlink value, of
    course). Returns true on success and false on failure.
    """
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
        return False
    if sstat[stat.ST_NLINK] == 1:
        return True
    return copyfile(src, src, sstat=sstat)

def which(path, item, direction = 0, history = False, executable=False):
    """
    Locate `item` in the list of paths `path` (colon separated string like $PATH).
    If `direction` is non-zero then the list is reversed.
    If `history` is True then the list of candidates also returned as result,history.
    If `executable` is True then the candidate has to be an executable file,
    otherwise the candidate simply has to exist.
    """

    if executable:
        is_candidate = lambda p: os.path.isfile(p) and os.access(p, os.X_OK)
    else:
        is_candidate = lambda p: os.path.exists(p)

    hist = []
    paths = (path or "").split(':')
    if direction != 0:
        paths.reverse()

    for p in paths:
        next = os.path.join(p, item)
        hist.append(next)
        if is_candidate(next):
            if not os.path.isabs(next):
                next = os.path.abspath(next)
            if history:
                return next, hist
            return next

    if history:
        return "", hist
    return ""
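# Example usage (illustrative only, not part of the original module; the exact
# result depends on what exists on the host filesystem):
#
#   >>> which("/usr/local/bin:/usr/bin:/bin", "sh", executable=True)
#   '/bin/sh'
#   >>> which("/usr/bin:/bin", "does-not-exist", history=True)
#   ('', ['/usr/bin/does-not-exist', '/bin/does-not-exist'])
#
# With history=True the second element lists every candidate path that was
# checked, which is useful for diagnostics.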
def to_boolean(string, default=None):
    """Convert a common true/false string representation to a boolean,
    returning 'default' for an empty value."""
    if not string:
        return default

    normalized = string.lower()
    if normalized in ("y", "yes", "1", "true"):
        return True
    elif normalized in ("n", "no", "0", "false"):
        return False
    else:
        raise ValueError("Invalid value for to_boolean: %s" % string)

def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains all the values specified.
    Arguments:
    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().
    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().
    truevalue -- the value to return if checkvalues is a subset of variable.
    falsevalue -- the value to return if variable is empty or if checkvalues is
    not a subset of variable.
    d -- the data store.
    """
    val = d.getVar(variable)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues.issubset(val):
        return truevalue
    return falsevalue

def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains any of the values specified; otherwise
    behaves like contains()."""
    val = d.getVar(variable)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues & val:
        return truevalue
    return falsevalue

def filter(variable, checkvalues, d):
    """Return all words in the variable that are present in the checkvalues.
    Arguments:
    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().
    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().
    d -- the data store.
    """
    val = d.getVar(variable)
    if not val:
        return ''
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    return ' '.join(sorted(checkvalues & val))
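# Example usage (illustrative only, not part of the original module; 'd' is
# assumed to be a datastore where DISTRO_FEATURES = "systemd usrmerge wifi"):
#
#   contains("DISTRO_FEATURES", "systemd wifi", "yes", "no", d)          # -> "yes"
#   contains_any("DISTRO_FEATURES", "sysvinit systemd", "yes", "no", d)  # -> "yes"
#   filter("DISTRO_FEATURES", "wifi bluetooth", d)                       # -> "wifi"
#
# These helpers are commonly used from recipe metadata via inline python, e.g.
# ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd', '', d)}.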
def cpu_count():
    return multiprocessing.cpu_count()

def nonblockingfd(fd):
    fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

def process_profilelog(fn, pout = None):
    # Either call with a list of filenames and set pout or a filename and optionally pout.
    if not pout:
        pout = fn + '.processed'
    pout = open(pout, 'w')

    import pstats
    if isinstance(fn, list):
        p = pstats.Stats(*fn, stream=pout)
    else:
        p = pstats.Stats(fn, stream=pout)
    p.sort_stats('time')
    p.print_stats()
    p.print_callers()
    p.sort_stats('cumulative')
    p.print_stats()

    pout.flush()
    pout.close()

#
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap
    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)

def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Prepare a small piece of python code which calls the requested function
    # To do this we need to prepare two things - a set of variables we can use to pass
    # the values of arguments into the calling function, and the list of arguments for
    # the function being called
    context = {}
    funcargs = []
    # Handle unnamed arguments
    aidx = 1
    for arg in args:
        argname = 'arg_%s' % aidx
        context[argname] = arg
        funcargs.append(argname)
        aidx += 1
    # Handle keyword arguments
    context.update(kwargs)
    funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.keys()])
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
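# Example usage (illustrative only, not part of the original module;
# 'my_plugin_hook' stands in for any function already defined in the
# execution context):
#
#   result = exec_flat_python_func('my_plugin_hook', 42, name="demo")
#
# The positional and keyword arguments are bound to temporary names in the
# execution context and a one-line 'retval = func(...)' snippet is compiled
# and executed with better_compile()/better_exec(), so failures are reported
# with the same diagnostics as other BitBake python functions.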
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of _append, _prepend, _remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
        else:
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''
    varlines = []
    newlines = []
    in_var = None
    full_value = ''
    var_end = ''

    def handle_var_end():
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            if isinstance(indent, int):
                if indent == -1:
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)
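# Example varfunc callback (illustrative only, not part of the original module):
# change DESCRIPTION while leaving every other matched variable untouched.
#
#   def _set_description(varname, origvalue, op, newlines):
#       if varname == 'DESCRIPTION':
#           return ('A better description', None, 0, True)
#       return (origvalue, None, 0, True)
#
#   lines = ['DESCRIPTION = "old"\n', 'LICENSE = "MIT"\n']
#   updated, newlines = edit_metadata(lines, ['DESCRIPTION', 'LICENSE'], _set_description)
#   # updated is True and newlines[0] becomes 'DESCRIPTION = "A better description"\n'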
def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as f:
        (updated, newlines) = edit_metadata(f, variables, varfunc)
    if updated:
        with open(meta_file, 'w') as f:
            f.writelines(newlines)
    return updated

def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        bblayercalls.append(op)
        if op == '=':
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
            updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)
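# Example usage (illustrative only, not part of the original module; the conf
# path and layer directory are hypothetical):
#
#   notadded, notremoved = edit_bblayers_conf(
#       "build/conf/bblayers.conf",
#       add="/srv/layers/meta-example",
#       remove=None)
#
# notadded lists layers that were already present, notremoved lists layers
# that were asked to be removed but not found; the file is rewritten only if
# something actually changed.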
def get_file_layer(filename, d):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
    collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
    collection_res = {}
    for collection in collections:
        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or ''

    def path_to_layer(path):
        # Use longest path so we handle nested layers
        matchlen = 0
        match = None
        for collection, regex in collection_res.items():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES') or '').split()
    bbfilesmatch = False
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatch(filename, bbfilesentry):
            bbfilesmatch = True
            result = path_to_layer(bbfilesentry)

    if not bbfilesmatch:
        # Probably a bbclass
        result = path_to_layer(filename)

    return result

# Constant taken from http://linux.die.net/include/linux/prctl.h
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    pass

def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies
    """
    signum = getattr(signal, signame)
    # http://linux.die.net/man/2/prctl
    result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if result != 0:
        raise PrCtlError('prctl failed with error code %s' % result)

#
# Manually call the ioprio syscall. We could depend on other libs like psutil
# however this gets us enough of what we need to bitbake for now without the
# dependency
#
_unamearch = os.uname()[4]
IOPRIO_WHO_PROCESS = 1
IOPRIO_CLASS_SHIFT = 13

def ioprio_set(who, cls, value):
    NR_ioprio_set = None
    if _unamearch == "x86_64":
        NR_ioprio_set = 251
    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
        NR_ioprio_set = 289
    elif _unamearch == "aarch64":
        NR_ioprio_set = 30

    if NR_ioprio_set:
        ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
        rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
        if rc != 0:
            raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
    else:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)

def set_process_name(name):
    from ctypes import cdll, byref, create_string_buffer
    # This is nice to have for debugging, not essential
    try:
        libc = cdll.LoadLibrary('libc.so.6')
        buf = create_string_buffer(bytes(name, 'utf-8'))
        libc.prctl(15, byref(buf), 0, 0, 0)
    except:
        pass

# export common proxy variables from the datastore to the environment
def export_proxies(d):
    import os

    variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
                 'GIT_PROXY_COMMAND']
    exported = False

    for v in variables:
        if v in os.environ.keys():
            exported = True
        else:
            v_proxy = d.getVar(v)
            if v_proxy is not None:
                os.environ[v] = v_proxy
                exported = True

    return exported

def load_plugins(logger, plugins, pluginpath):
    def load_plugin(name):
        logger.debug(1, 'Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            return spec.loader.load_module()

    logger.debug(1, 'Loading plugins from %s...' % pluginpath)

    expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
                for ext in python_extensions)
    files = itertools.chain.from_iterable(expanded)
    names = set(os.path.splitext(os.path.basename(fn))[0] for fn in files)
    for name in names:
        if name != '__init__':
            plugin = load_plugin(name)
            if hasattr(plugin, 'plugin_init'):
                obj = plugin.plugin_init(plugins)
                plugins.append(obj or plugin)
            else:
                plugins.append(plugin)

class LogCatcher(logging.Handler):
    """Logging handler for collecting logged messages so you can check them later"""
    def __init__(self):
        self.messages = []
        logging.Handler.__init__(self, logging.WARNING)
    def emit(self, record):
        self.messages.append(bb.build.logformatter.format(record))
    def contains(self, message):
        return (message in self.messages)