#
# SPDX-License-Identifier: GPL-2.0-only
#
import bb.siggen
import oe
import os

def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
    # Return True if we should keep the dependency, False to drop it
    def isNative(x):
        return x.endswith("-native")
    def isCross(x):
        return "-cross-" in x
    def isNativeSDK(x):
        return x.startswith("nativesdk-")
    def isKernel(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return inherits.find("/module-base.bbclass") != -1 or inherits.find("/linux-kernel-base.bbclass") != -1
    def isPackageGroup(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return "/packagegroup.bbclass" in inherits
    def isAllArch(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return "/allarch.bbclass" in inherits
    def isImage(fn):
        return "/image.bbclass" in " ".join(dataCache.inherits[fn])

    # (Almost) always include our own inter-task dependencies.
    # The exception is the special do_kernel_configme->do_unpack_and_patch
    # dependency from archiver.bbclass.
    if recipename == depname:
        if task == "do_kernel_configme" and dep.endswith(".do_unpack_and_patch"):
            return False
        return True

    # Exclude well defined recipe->dependency
    if "%s->%s" % (recipename, depname) in siggen.saferecipedeps:
        return False

    # Check for special wildcard
    if "*->%s" % depname in siggen.saferecipedeps and recipename != depname:
        return False

    # Don't change native/cross/nativesdk recipe dependencies any further
    if isNative(recipename) or isCross(recipename) or isNativeSDK(recipename):
        return True

    # Only target packages beyond here

    # allarch packagegroups are assumed to have well behaved names which don't change between architectures/tunes
    if isPackageGroup(fn) and isAllArch(fn) and not isNative(depname):
        return False

    # Exclude well defined machine specific configurations which don't change ABI
    if depname in siggen.abisaferecipes and not isImage(fn):
        return False

    # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
    # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum
    # is machine specific.
    # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
    # and we recommend a kernel-module, we exclude the dependency.
    depfn = dep.rsplit(".", 1)[0]
    if dataCache and isKernel(depfn) and not isKernel(fn):
        for pkg in dataCache.runrecs[fn]:
            if " ".join(dataCache.runrecs[fn][pkg]).find("kernel-module-") != -1:
                return False

    # Default to keep dependencies
    return True
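# A sketch of the configuration this filter consults; the values below are
# hypothetical, not oe-core defaults. A "recipe->dependency" entry in
# SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS drops that single edge, a "*->dependency"
# wildcard drops the edge from every recipe except the dependency itself, and
# SIGGEN_EXCLUDERECIPES_ABISAFE lists dependencies whose machine-specific
# configuration is taken not to affect ABI:
#
#   SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS = "foo->bar *->quilt-native"
#   SIGGEN_EXCLUDERECIPES_ABISAFE = "base-files initscripts"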
def sstate_lockedsigs(d):
    sigs = {}
    types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split()
    for t in types:
        siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t
        lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split()
        for ls in lockedsigs:
            pn, task, h = ls.split(":", 2)
            if pn not in sigs:
                sigs[pn] = {}
            sigs[pn][task] = [h, siggen_lockedsigs_var]
    return sigs
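# A sketch of the input sstate_lockedsigs() parses (hashes abbreviated and
# made up). Each entry is "<pn>:<task>:<hash>"; split(":", 2) means any later
# colons stay inside the hash field:
#
#   SIGGEN_LOCKEDSIGS_TYPES = "t-qemux86-64"
#   SIGGEN_LOCKEDSIGS_t-qemux86-64 = "\
#       zlib:do_fetch:3b2f... \
#       zlib:do_configure:9e1c... \
#       "
#
# which yields
#   {'zlib': {'do_fetch': ['3b2f...', 'SIGGEN_LOCKEDSIGS_t-qemux86-64'], ...}}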
class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
    name = "OEBasic"
    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
    def rundep_check(self, fn, recipename, task, dep, depname, dataCache=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)

class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
    name = "OEBasicHash"
    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
        self.lockedsigs = sstate_lockedsigs(data)
        self.lockedhashes = {}
        self.lockedpnmap = {}
        self.lockedhashfn = {}
        self.machine = data.getVar("MACHINE")
        self.mismatch_msgs = []
        self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or "").split()
        self.unlockedrecipes = {k: "" for k in self.unlockedrecipes}

    def tasks_resolved(self, virtmap, virtpnmap, dataCache):
        # Translate virtual/xxx entries to PN values
        newabisafe = []
        for a in self.abisaferecipes:
            if a in virtpnmap:
                newabisafe.append(virtpnmap[a])
            else:
                newabisafe.append(a)
        self.abisaferecipes = newabisafe
        newsafedeps = []
        for a in self.saferecipedeps:
            a1, a2 = a.split("->")
            if a1 in virtpnmap:
                a1 = virtpnmap[a1]
            if a2 in virtpnmap:
                a2 = virtpnmap[a2]
            newsafedeps.append(a1 + "->" + a2)
        self.saferecipedeps = newsafedeps

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)

    def get_taskdata(self):
        data = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskdata()
        return (data, self.lockedpnmap, self.lockedhashfn)

    def set_taskdata(self, data):
        coredata, self.lockedpnmap, self.lockedhashfn = data
        super(bb.siggen.SignatureGeneratorBasicHash, self).set_taskdata(coredata)

    def dump_sigs(self, dataCache, options):
        sigfile = os.getcwd() + "/locked-sigs.inc"
        bb.plain("Writing locked sigs to %s" % sigfile)
        self.dump_lockedsigs(sigfile)
        return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)

    def get_taskhash(self, fn, task, deps, dataCache):
        h = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskhash(fn, task, deps, dataCache)

        recipename = dataCache.pkg_fn[fn]
        self.lockedpnmap[fn] = recipename
        self.lockedhashfn[fn] = dataCache.hashfn[fn]

        unlocked = False
        if recipename in self.unlockedrecipes:
            unlocked = True
        else:
            def get_mc(tid):
                tid = tid.rsplit('.', 1)[0]
                if tid.startswith('multiconfig:'):
                    elems = tid.split(':')
                    return elems[1]
            def recipename_from_dep(dep):
                # The dep entry will look something like
                # /path/path/recipename.bb.task, virtual:native:/p/foo.bb.task,
                # ...
                fn = dep.rsplit('.', 1)[0]
                return dataCache.pkg_fn[fn]

            mc = get_mc(fn)
            # If any unlocked recipe is in the direct dependencies then the
            # current recipe should be unlocked as well.
            depnames = [recipename_from_dep(x) for x in deps if mc == get_mc(x)]
            if any(x in y for y in depnames for x in self.unlockedrecipes):
                self.unlockedrecipes[recipename] = ''
                unlocked = True

        if not unlocked and recipename in self.lockedsigs:
            if task in self.lockedsigs[recipename]:
                k = fn + "." + task
                h_locked = self.lockedsigs[recipename][task][0]
                var = self.lockedsigs[recipename][task][1]
                self.lockedhashes[k] = h_locked
                self.taskhash[k] = h_locked
                #bb.warn("Using %s %s %s" % (recipename, task, h))

                if h != h_locked:
                    self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
                                              % (recipename, task, h, h_locked, var))

                return h_locked
        #bb.warn("%s %s %s" % (recipename, task, h))
        return h

    def dump_sigtask(self, fn, task, stampbase, runtime):
        k = fn + "." + task
        if k in self.lockedhashes:
            return
        super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)

    def dump_lockedsigs(self, sigfile, taskfilter=None):
        types = {}
        for k in self.runtaskdeps:
            if taskfilter:
                if k not in taskfilter:
                    continue
            fn = k.rsplit(".", 1)[0]
            t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
            t = 't-' + t.replace('_', '-')
            if t not in types:
                types[t] = []
            types[t].append(k)

        with open(sigfile, "w") as f:
            l = sorted(types)
            for t in l:
                f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
                types[t].sort()
                sortedk = sorted(types[t], key=lambda k: self.lockedpnmap[k.rsplit(".", 1)[0]])
                for k in sortedk:
                    fn = k.rsplit(".", 1)[0]
                    task = k.rsplit(".", 1)[1]
                    if k not in self.taskhash:
                        continue
                    f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[k] + " \\\n")
                f.write('    "\n')
            f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))

    def dump_siglist(self, sigfile):
        with open(sigfile, "w") as f:
            tasks = []
            for taskitem in self.taskhash:
                (fn, task) = taskitem.rsplit(".", 1)
                pn = self.lockedpnmap[fn]
                tasks.append((pn, task, fn, self.taskhash[taskitem]))
            for (pn, task, fn, taskhash) in sorted(tasks):
                f.write('%s.%s %s %s\n' % (pn, task, fn, taskhash))

    def checkhashes(self, missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d):
        warn_msgs = []
        error_msgs = []
        sstate_missing_msgs = []

        for task in range(len(sq_fn)):
            if task not in ret:
                for pn in self.lockedsigs:
                    if sq_hash[task] in iter(self.lockedsigs[pn].values()):
                        if sq_task[task] == 'do_shared_workdir':
                            continue
                        sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
                                                   % (pn, sq_task[task], sq_hash[task]))

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
        if checklevel == 'warn':
            warn_msgs += self.mismatch_msgs
        elif checklevel == 'error':
            error_msgs += self.mismatch_msgs

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK")
        if checklevel == 'warn':
            warn_msgs += sstate_missing_msgs
        elif checklevel == 'error':
            error_msgs += sstate_missing_msgs

        if warn_msgs:
            bb.warn("\n".join(warn_msgs))
        if error_msgs:
            bb.fatal("\n".join(error_msgs))
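# For reference, dump_lockedsigs() above emits an include file in the same
# format that sstate_lockedsigs() reads back, e.g. (hypothetical values):
#
#   SIGGEN_LOCKEDSIGS_t-core2-64 = "\
#       zlib:do_configure:9e1c... \
#       "
#   SIGGEN_LOCKEDSIGS_TYPES_qemux86-64 = "t-core2-64"
#
# checkhashes() escalates its findings according to
# SIGGEN_LOCKEDSIGS_TASKSIG_CHECK and SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK,
# each of which may be set to "warn" or "error"; any other value disables
# that check.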
class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHash):
    name = "OEEquivHash"

    def init_rundepcheck(self, data):
        super().init_rundepcheck(data)
        self.server = data.getVar('SSTATE_HASHEQUIV_SERVER')
        self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
        self.unihashes = bb.persist_data.persist('SSTATESIG_UNIHASH_CACHE_v1_' + self.method.replace('.', '_'), data)

    def get_taskdata(self):
        return (self.server, self.method) + super().get_taskdata()

    def set_taskdata(self, data):
        self.server, self.method = data[:2]
        super().set_taskdata(data[2:])

    def __get_task_unihash_key(self, task):
        # TODO: The key only *needs* to be the taskhash, the task is just
        # convenient
        return '%s:%s' % (task, self.taskhash[task])

    def get_stampfile_hash(self, task):
        if task in self.taskhash:
            # If a unique hash is reported, use it as the stampfile hash. This
            # ensures that a task won't be re-run if its taskhash changes but
            # it would still produce the same output hash
            unihash = self.unihashes.get(self.__get_task_unihash_key(task))
            if unihash is not None:
                return unihash

        return super().get_stampfile_hash(task)

    def get_unihash(self, task):
        import urllib
        import json

        taskhash = self.taskhash[task]

        key = self.__get_task_unihash_key(task)

        # TODO: This cache can grow unbounded. It probably only needs to keep
        # the most recent entry for each task
        unihash = self.unihashes.get(key)
        if unihash is not None:
            return unihash

        # In the absence of being able to discover a unique hash from the
        # server, make it be equivalent to the taskhash. The unique "hash" only
        # really needs to be a unique string (not even necessarily a hash), but
        # making it match the taskhash has a few advantages:
        #
        # 1) All of the sstate code that assumes hashes can be the same still
        #    works
        # 2) It provides maximal compatibility with builders that don't use
        #    an equivalency server
        # 3) It is easy for multiple independent builders to derive the same
        #    unique hash from the same input. This means that if the
        #    independent builders find the same taskhash, but it isn't reported
        #    to the server, there is a better chance that they will agree on
        #    the unique hash.
        unihash = taskhash

        try:
            url = '%s/v1/equivalent?%s' % (self.server,
                    urllib.parse.urlencode({'method': self.method, 'taskhash': self.taskhash[task]}))

            request = urllib.request.Request(url)
            response = urllib.request.urlopen(request)
            data = response.read().decode('utf-8')

            json_data = json.loads(data)

            if json_data:
                unihash = json_data['unihash']
                # A unique hash equal to the taskhash is not very interesting,
                # so it is reported at debug level 2. If they differ, that
                # is much more interesting, so it is reported at debug level 1
                bb.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, task, self.server))
            else:
                bb.debug(2, 'No reported unihash for %s:%s from %s' % (task, taskhash, self.server))
        except urllib.error.URLError as e:
            bb.warn('Failure contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
        except (KeyError, json.JSONDecodeError) as e:
            bb.warn('Poorly formatted response from %s: %s' % (self.server, str(e)))

        self.unihashes[key] = unihash
        return unihash

    def report_unihash(self, path, task, d):
        import urllib
        import json
        import tempfile
        import base64
        import importlib

        taskhash = d.getVar('BB_TASKHASH')
        unihash = d.getVar('BB_UNIHASH')
        report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
        tempdir = d.getVar('T')
        fn = d.getVar('BB_FILENAME')
        key = fn + '.do_' + task + ':' + taskhash

        # Sanity checks
        cache_unihash = self.unihashes.get(key)
        if cache_unihash is None:
            bb.fatal('%s not in unihash cache. Please report this error' % key)

        if cache_unihash != unihash:
            bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash))

        sigfile = None
        sigfile_name = "depsig.do_%s.%d" % (task, os.getpid())
        sigfile_link = "depsig.do_%s" % task

        try:
            sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b')

            locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d}

            (module, method) = self.method.rsplit('.', 1)
            locs['method'] = getattr(importlib.import_module(module), method)

            outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs)

            try:
                url = '%s/v1/equivalent' % self.server
                task_data = {
                    'taskhash': taskhash,
                    'method': self.method,
                    'outhash': outhash,
                    'unihash': unihash,
                    'owner': d.getVar('SSTATE_HASHEQUIV_OWNER')
                    }

                if report_taskdata:
                    sigfile.seek(0)

                    task_data['PN'] = d.getVar('PN')
                    task_data['PV'] = d.getVar('PV')
                    task_data['PR'] = d.getVar('PR')
                    task_data['task'] = task
                    task_data['outhash_siginfo'] = sigfile.read().decode('utf-8')

                headers = {'content-type': 'application/json'}

                request = urllib.request.Request(url, json.dumps(task_data).encode('utf-8'), headers)
                response = urllib.request.urlopen(request)
                data = response.read().decode('utf-8')

                json_data = json.loads(data)
                new_unihash = json_data['unihash']

                if new_unihash != unihash:
                    bb.debug(1, 'Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
                else:
                    bb.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
            except urllib.error.URLError as e:
                bb.warn('Failure contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
            except (KeyError, json.JSONDecodeError) as e:
                bb.warn('Poorly formatted response from %s: %s' % (self.server, str(e)))
        finally:
            if sigfile:
                sigfile.close()

                sigfile_link_path = os.path.join(tempdir, sigfile_link)
                bb.utils.remove(sigfile_link_path)

                try:
                    os.symlink(sigfile_name, sigfile_link_path)
                except OSError:
                    pass
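# A sketch of the hash equivalence exchange implemented above, with fabricated
# hashes. get_unihash() issues a GET such as
#
#   GET <SSTATE_HASHEQUIV_SERVER>/v1/equivalent?method=<SSTATE_HASHEQUIV_METHOD>&taskhash=abc123...
#
# and expects either an empty body or a JSON object containing at least a
# "unihash" field. report_unihash() POSTs JSON to the same endpoint, roughly:
#
#   {"taskhash": "abc123...", "method": "oe.sstatesig.OEOuthashBasic",
#    "outhash": "def456...", "unihash": "abc123...", "owner": null}
#
# and compares the "unihash" in the response against BB_UNIHASH to detect
# whether the server remapped the task to a known-equivalent hash.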
# Insert these classes into siggen's namespace so it can see and select them
bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
def find_siginfo(pn, taskname, taskhashlist, d):
    """ Find signature data files for comparison purposes """

    import fnmatch
    import glob

    if not taskname:
        # We have to derive pn and taskname
        key = pn
        splitit = key.split('.bb.')
        taskname = splitit[1]
        pn = os.path.basename(splitit[0]).split('_')[0]
        if key.startswith('virtual:native:'):
            pn = pn + '-native'

    hashfiles = {}
    filedates = {}

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]

    # First search in stamps dir
    localdata = d.createCopy()
    localdata.setVar('MULTIMACH_TARGET_SYS', '*')
    localdata.setVar('PN', pn)
    localdata.setVar('PV', '*')
    localdata.setVar('PR', '*')
    localdata.setVar('EXTENDPE', '')
    stamp = localdata.getVar('STAMP')
    if pn.startswith("gcc-source"):
        # gcc-source shared workdir is a special case :(
        stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")

    filespec = '%s.%s.sigdata.*' % (stamp, taskname)
    foundall = False

    for fullpath in glob.glob(filespec):
        if taskhashlist:
            for taskhash in taskhashlist:
                if fullpath.endswith('.%s' % taskhash):
                    hashfiles[taskhash] = fullpath
                    if len(hashfiles) == len(taskhashlist):
                        foundall = True
                        break
        else:
            try:
                filedates[fullpath] = os.stat(fullpath).st_mtime
            except OSError:
                continue
            hashval = get_hashval(fullpath)
            hashfiles[hashval] = fullpath

    if not taskhashlist or (len(filedates) < 2 and not foundall):
        # That didn't work, look in sstate-cache
        hashes = taskhashlist or ['?' * 64]
        localdata = bb.data.createCopy(d)
        for hashval in hashes:
            localdata.setVar('PACKAGE_ARCH', '*')
            localdata.setVar('TARGET_VENDOR', '*')
            localdata.setVar('TARGET_OS', '*')
            localdata.setVar('PN', pn)
            localdata.setVar('PV', '*')
            localdata.setVar('PR', '*')
            localdata.setVar('BB_TASKHASH', hashval)
            swspec = localdata.getVar('SSTATE_SWSPEC')
            if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
                localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
            elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
                localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
            sstatename = taskname[3:]
            filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)

            matchedfiles = glob.glob(filespec)

            for fullpath in matchedfiles:
                actual_hashval = get_hashval(fullpath)
                if actual_hashval in hashfiles:
                    continue
                hashfiles[actual_hashval] = fullpath
                if not taskhashlist:
                    try:
                        filedates[fullpath] = os.stat(fullpath).st_mtime
                    except OSError:
                        continue

    if taskhashlist:
        return hashfiles
    else:
        return filedates

bb.siggen.find_siginfo = find_siginfo
def sstate_get_manifest_filename(task, d):
    """
    Return the sstate manifest file path for a particular task.
    Also returns the datastore that can be used to query related variables.
    """
    d2 = d.createCopy()
    extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info')
    if extrainf:
        d2.setVar("SSTATE_MANMACH", extrainf)
    return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)

def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
    d2 = d
    variant = ''
    curr_variant = ''
    if d.getVar("BBEXTENDCURR") == "multilib":
        curr_variant = d.getVar("BBEXTENDVARIANT")
        if "virtclass-multilib" not in d.getVar("OVERRIDES"):
            curr_variant = "invalid"
    if taskdata2.startswith("virtual:multilib"):
        variant = taskdata2.split(":")[2]
    if curr_variant != variant:
        if variant not in multilibcache:
            multilibcache[variant] = oe.utils.get_multilib_datastore(variant, d)
        d2 = multilibcache[variant]

    if taskdata.endswith("-native"):
        pkgarchs = ["${BUILD_ARCH}"]
    elif taskdata.startswith("nativesdk-"):
        pkgarchs = ["${SDK_ARCH}_${SDK_OS}", "allarch"]
    elif "-cross-canadian" in taskdata:
        pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
    elif "-cross-" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"]
    elif "-crosssdk" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
    else:
        pkgarchs = ['${MACHINE_ARCH}']
        pkgarchs = pkgarchs + list(reversed(d2.getVar("PACKAGE_EXTRA_ARCHS").split()))
        pkgarchs.append('allarch')
        pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')

    for pkgarch in pkgarchs:
        manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
        if os.path.exists(manifest):
            return manifest, d2
    bb.warn("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
    return None, d2
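# Illustrative only: for a target recipe such as taskdata "zlib" with taskname
# "populate_sysroot", the candidate list starts with ${MACHINE_ARCH} and then
# falls back through the reversed PACKAGE_EXTRA_ARCHS (e.g. a hypothetical
# "core2-64"), "allarch" and the SDK arch, probing paths like
#
#   ${SSTATE_MANIFESTS}/manifest-core2-64-zlib.populate_sysroot
#
# until one exists on disk.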
def OEOuthashBasic(path, sigfile, task, d):
    """
    Basic output hash function

    Calculates the output hash of a task by hashing all output file metadata,
    and file contents.
    """
    import hashlib
    import stat
    import pwd
    import grp

    def update_hash(s):
        s = s.encode('utf-8')
        h.update(s)
        if sigfile:
            sigfile.write(s)

    h = hashlib.sha256()
    prev_dir = os.getcwd()
    include_owners = os.environ.get('PSEUDO_DISABLED') == '0'

    try:
        os.chdir(path)

        update_hash("OEOuthashBasic\n")

        # It is only currently useful to get equivalent hashes for things that
        # can be restored from sstate. Since the sstate object is named using
        # SSTATE_PKGSPEC and the task name, those should be included in the
        # output hash calculation.
        update_hash("SSTATE_PKGSPEC=%s\n" % d.getVar('SSTATE_PKGSPEC'))
        update_hash("task=%s\n" % task)

        for root, dirs, files in os.walk('.', topdown=True):
            # Sort directories to ensure consistent ordering when recursing
            dirs.sort()
            files.sort()

            def process(path):
                s = os.lstat(path)

                if stat.S_ISDIR(s.st_mode):
                    update_hash('d')
                elif stat.S_ISCHR(s.st_mode):
                    update_hash('c')
                elif stat.S_ISBLK(s.st_mode):
                    update_hash('b')
                elif stat.S_ISSOCK(s.st_mode):
                    update_hash('s')
                elif stat.S_ISLNK(s.st_mode):
                    update_hash('l')
                elif stat.S_ISFIFO(s.st_mode):
                    update_hash('p')
                else:
                    update_hash('-')

                def add_perm(mask, on, off='-'):
                    if mask & s.st_mode:
                        update_hash(on)
                    else:
                        update_hash(off)

                add_perm(stat.S_IRUSR, 'r')
                add_perm(stat.S_IWUSR, 'w')
                if stat.S_ISUID & s.st_mode:
                    add_perm(stat.S_IXUSR, 's', 'S')
                else:
                    add_perm(stat.S_IXUSR, 'x')

                add_perm(stat.S_IRGRP, 'r')
                add_perm(stat.S_IWGRP, 'w')
                if stat.S_ISGID & s.st_mode:
                    add_perm(stat.S_IXGRP, 's', 'S')
                else:
                    add_perm(stat.S_IXGRP, 'x')

                add_perm(stat.S_IROTH, 'r')
                add_perm(stat.S_IWOTH, 'w')
                if stat.S_ISVTX & s.st_mode:
                    update_hash('t')
                else:
                    add_perm(stat.S_IXOTH, 'x')

                if include_owners:
                    update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
                    update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)

                update_hash(" ")
                if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
                    update_hash("%9s" % ("%d.%d" % (os.major(s.st_rdev), os.minor(s.st_rdev))))
                else:
                    update_hash(" " * 9)

                update_hash(" ")
                if stat.S_ISREG(s.st_mode):
                    update_hash("%10d" % s.st_size)
                else:
                    update_hash(" " * 10)

                update_hash(" ")
                fh = hashlib.sha256()
                if stat.S_ISREG(s.st_mode):
                    # Hash file contents
                    with open(path, 'rb') as fobj:
                        for chunk in iter(lambda: fobj.read(4096), b""):
                            fh.update(chunk)
                    update_hash(fh.hexdigest())
                else:
                    update_hash(" " * len(fh.hexdigest()))

                update_hash(" %s" % path)

                if stat.S_ISLNK(s.st_mode):
                    update_hash(" -> %s" % os.readlink(path))

                update_hash("\n")

            # Process this directory and all its child files
            process(root)
            for f in files:
                if f == 'fixmepath':
                    continue
                process(os.path.join(root, f))
    finally:
        os.chdir(prev_dir)

    return h.hexdigest()
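# A sketch of one ls-style record fed to update_hash() per file by process()
# above (fields fabricated; the owner/group columns only appear when running
# under pseudo, i.e. PSEUDO_DISABLED == '0'):
#
#   -rw-r--r--       root       root                 1234 <sha256 of contents> ./usr/lib/libfoo.so.1
#
# Directories, devices and symlinks vary the leading type character, fill the
# major.minor column, or append " -> <target>", so the final digest changes
# whenever any file's metadata or contents change.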