
#
# SPDX-License-Identifier: GPL-2.0-only
#
import bb.siggen
import bb.runqueue
import oe
import os

def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
    # Return True if we should keep the dependency, False to drop it
    def isNative(x):
        return x.endswith("-native")
    def isCross(x):
        return "-cross-" in x
    def isNativeSDK(x):
        return x.startswith("nativesdk-")
    def isKernel(mc, fn):
        inherits = " ".join(dataCaches[mc].inherits[fn])
        return inherits.find("/module-base.bbclass") != -1 or inherits.find("/linux-kernel-base.bbclass") != -1
    def isPackageGroup(mc, fn):
        inherits = " ".join(dataCaches[mc].inherits[fn])
        return "/packagegroup.bbclass" in inherits
    def isAllArch(mc, fn):
        inherits = " ".join(dataCaches[mc].inherits[fn])
        return "/allarch.bbclass" in inherits
    def isImage(mc, fn):
        return "/image.bbclass" in " ".join(dataCaches[mc].inherits[fn])

    depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep)
    mc, _ = bb.runqueue.split_mc(fn)

    # (Almost) always include our own inter-task dependencies (unless they
    # come from a mcdepends). The exception is the special
    # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass.
    if recipename == depname and depmc == mc:
        if task == "do_kernel_configme" and deptaskname == "do_unpack_and_patch":
            return False
        return True

    # Exclude well defined recipe->dependency pairs
    if "%s->%s" % (recipename, depname) in siggen.saferecipedeps:
        return False

    # Check for the special wildcard form
    if "*->%s" % depname in siggen.saferecipedeps and recipename != depname:
        return False

    # Don't change native/cross/nativesdk recipe dependencies any further
    if isNative(recipename) or isCross(recipename) or isNativeSDK(recipename):
        return True

    # Only target packages beyond this point

    # allarch packagegroups are assumed to have well behaved names which don't change between architectures/tunes
    if isPackageGroup(mc, fn) and isAllArch(mc, fn) and not isNative(depname):
        return False

    # Exclude well defined machine specific configurations which don't change ABI
    if depname in siggen.abisaferecipes and not isImage(mc, fn):
        return False

    # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
    # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum
    # is machine specific.
    # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
    # and we recommend a kernel-module, we exclude the dependency.
    if dataCaches and isKernel(depmc, depmcfn) and not isKernel(mc, fn):
        for pkg in dataCaches[mc].runrecs[fn]:
            if " ".join(dataCaches[mc].runrecs[fn][pkg]).find("kernel-module-") != -1:
                return False

    # Default to keeping the dependency
    return True
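
# The two variables consulted above are space-separated lists set in the
# distro/site configuration; the values below are illustrative only:
#
#   SIGGEN_EXCLUDERECIPES_ABISAFE = "initscripts base-files"
#   SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS = "quilt-native->quilt"
#
# The first names recipes whose output is ABI-safe to depend on regardless
# of machine-specific configuration; the second names recipe->dependency
# pairs (or "*->dep" wildcards) whose hashes are safe to exclude.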

def sstate_lockedsigs(d):
    sigs = {}
    types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split()
    for t in types:
        siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t
        lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split()
        for ls in lockedsigs:
            pn, task, h = ls.split(":", 2)
            if pn not in sigs:
                sigs[pn] = {}
            sigs[pn][task] = [h, siggen_lockedsigs_var]
    return sigs
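
# sstate_lockedsigs() parses entries of the form "<pn>:<task>:<hash>". An
# illustrative fragment (hashes shortened; real values are full hex digests):
#
#   SIGGEN_LOCKEDSIGS_TYPES = "t-core2-64"
#   SIGGEN_LOCKEDSIGS_t-core2-64 = "\
#       zlib:do_fetch:3f7... \
#       zlib:do_compile:9c1... \
#       "
#
# Fragments like this are normally generated by dump_lockedsigs() below
# (e.g. the locked-sigs.inc used by the extensible SDK) rather than written
# by hand.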

class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
    name = "OEBasic"
    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)

class SignatureGeneratorOEBasicHashMixIn(object):
    supports_multiconfig_datacaches = True

    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
        self.lockedsigs = sstate_lockedsigs(data)
        self.lockedhashes = {}
        self.lockedpnmap = {}
        self.lockedhashfn = {}
        self.machine = data.getVar("MACHINE")
        self.mismatch_msgs = []
        self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or "").split()
        self.unlockedrecipes = {k: "" for k in self.unlockedrecipes}
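        # SIGGEN_UNLOCKED_RECIPES lists recipes (by PN) exempted from locked
        # signatures, e.g. a recipe being worked on in the eSDK workspace:
        #   SIGGEN_UNLOCKED_RECIPES += "curl"   (value illustrative)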
        self.buildarch = data.getVar('BUILD_ARCH')
        self._internal = False

    def tasks_resolved(self, virtmap, virtpnmap, dataCache):
        # Translate virtual/xxx entries to PN values
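        # e.g. an abisaferecipes entry of "virtual/kernel" becomes the PN of
        # whichever recipe provides it, such as "linux-yocto" (illustrative).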
        newabisafe = []
        for a in self.abisaferecipes:
            if a in virtpnmap:
                newabisafe.append(virtpnmap[a])
            else:
                newabisafe.append(a)
        self.abisaferecipes = newabisafe
        newsafedeps = []
        for a in self.saferecipedeps:
            a1, a2 = a.split("->")
            if a1 in virtpnmap:
                a1 = virtpnmap[a1]
            if a2 in virtpnmap:
                a2 = virtpnmap[a2]
            newsafedeps.append(a1 + "->" + a2)
        self.saferecipedeps = newsafedeps

    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)

    def get_taskdata(self):
        return (self.lockedpnmap, self.lockedhashfn, self.lockedhashes) + super().get_taskdata()

    def set_taskdata(self, data):
        self.lockedpnmap, self.lockedhashfn, self.lockedhashes = data[:3]
        super().set_taskdata(data[3:])

    def dump_sigs(self, dataCache, options):
        sigfile = os.getcwd() + "/locked-sigs.inc"
        bb.plain("Writing locked sigs to %s" % sigfile)
        self.dump_lockedsigs(sigfile)
        return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)

    def prep_taskhash(self, tid, deps, dataCaches):
        super().prep_taskhash(tid, deps, dataCaches)
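        # self.extramethod only exists when a hash equivalence capable
        # mix-in is active. Tagging native/cross tasks with BUILD_ARCH keeps
        # hash equivalence reports from build hosts of different
        # architectures separate.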
        if hasattr(self, "extramethod"):
            (mc, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
            inherits = " ".join(dataCaches[mc].inherits[fn])
            if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
                self.extramethod[tid] = ":" + self.buildarch

    def get_taskhash(self, tid, deps, dataCaches):
        if tid in self.lockedhashes:
            if self.lockedhashes[tid]:
                return self.lockedhashes[tid]
            else:
                return super().get_taskhash(tid, deps, dataCaches)

        # get_taskhash will call get_unihash internally in the parent class;
        # we need to disable our filter of it whilst this runs or incorrect
        # hashes can be calculated.
        self._internal = True
        h = super().get_taskhash(tid, deps, dataCaches)
        self._internal = False

        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)

        recipename = dataCaches[mc].pkg_fn[fn]
        self.lockedpnmap[fn] = recipename
        self.lockedhashfn[fn] = dataCaches[mc].hashfn[fn]

        unlocked = False
        if recipename in self.unlockedrecipes:
            unlocked = True
        else:
            def recipename_from_dep(dep):
                (depmc, _, _, depfn) = bb.runqueue.split_tid_mcfn(dep)
                return dataCaches[depmc].pkg_fn[depfn]

            # If any unlocked recipe is in the direct dependencies then the
            # current recipe should be unlocked as well.
            depnames = [recipename_from_dep(x) for x in deps if mc == bb.runqueue.mc_from_tid(x)]
            if any(x in y for y in depnames for x in self.unlockedrecipes):
                self.unlockedrecipes[recipename] = ''
                unlocked = True

        if not unlocked and recipename in self.lockedsigs:
            if task in self.lockedsigs[recipename]:
                h_locked = self.lockedsigs[recipename][task][0]
                var = self.lockedsigs[recipename][task][1]
                self.lockedhashes[tid] = h_locked
                self._internal = True
                unihash = self.get_unihash(tid)
                self._internal = False

                #bb.warn("Using %s %s %s" % (recipename, task, h))

                if h != h_locked and h_locked != unihash:
                    self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
                                          % (recipename, task, h, h_locked, var))

                return h_locked

        self.lockedhashes[tid] = False
        #bb.warn("%s %s %s" % (recipename, task, h))
        return h

    def get_stampfile_hash(self, tid):
        if tid in self.lockedhashes and self.lockedhashes[tid]:
            return self.lockedhashes[tid]
        return super().get_stampfile_hash(tid)

    def get_unihash(self, tid):
        if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
            return self.lockedhashes[tid]
        return super().get_unihash(tid)

    def dump_sigtask(self, fn, task, stampbase, runtime):
        tid = fn + ":" + task
        if tid in self.lockedhashes and self.lockedhashes[tid]:
            return
        super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)

    def dump_lockedsigs(self, sigfile, taskfilter=None):
        types = {}
        for tid in self.runtaskdeps:
            if taskfilter:
                if tid not in taskfilter:
                    continue
            fn = bb.runqueue.fn_from_tid(tid)
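            # hashfn originates from BB_HASHFILENAME; in OE its second word
            # is SSTATE_PKGSPEC, whose sixth colon-separated field (index 5)
            # is SSTATE_PKGARCH (layout assumed from sstate.bbclass), giving
            # us a per-architecture "type" to group the locked sigs under.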
            t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
            t = 't-' + t.replace('_', '-')
            if t not in types:
                types[t] = []
            types[t].append(tid)

        with open(sigfile, "w") as f:
            l = sorted(types)
            for t in l:
                f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
                types[t].sort()
                sortedtid = sorted(types[t], key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
                for tid in sortedtid:
                    (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
                    if tid not in self.taskhash:
                        continue
                    f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
                f.write('    "\n')
            f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))

    def dump_siglist(self, sigfile):
        with open(sigfile, "w") as f:
            tasks = []
            for taskitem in self.taskhash:
                (fn, task) = taskitem.rsplit(":", 1)
                pn = self.lockedpnmap[fn]
                tasks.append((pn, task, fn, self.taskhash[taskitem]))
            for (pn, task, fn, taskhash) in sorted(tasks):
                f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))

    def checkhashes(self, sq_data, missed, found, d):
        warn_msgs = []
        error_msgs = []
        sstate_missing_msgs = []

        for tid in sq_data['hash']:
            if tid not in found:
                for pn in self.lockedsigs:
                    taskname = bb.runqueue.taskname_from_tid(tid)
                    if sq_data['hash'][tid] in iter(self.lockedsigs[pn].values()):
                        if taskname == 'do_shared_workdir':
                            continue
                        sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
                                               % (pn, taskname, sq_data['hash'][tid]))

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
        if checklevel == 'warn':
            warn_msgs += self.mismatch_msgs
        elif checklevel == 'error':
            error_msgs += self.mismatch_msgs

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK")
        if checklevel == 'warn':
            warn_msgs += sstate_missing_msgs
        elif checklevel == 'error':
            error_msgs += sstate_missing_msgs

        if warn_msgs:
            bb.warn("\n".join(warn_msgs))
        if error_msgs:
            bb.fatal("\n".join(error_msgs))
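
# Both checks above are governed by configuration; each level may be set to
# "warn" or "error" (any other value disables the check), e.g.:
#
#   SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "error"
#   SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK = "warn"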

class SignatureGeneratorOEBasicHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
    name = "OEBasicHash"

class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorUniHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
    name = "OEEquivHash"

    def init_rundepcheck(self, data):
        super().init_rundepcheck(data)
        self.server = data.getVar('BB_HASHSERVE')
        if not self.server:
            bb.fatal("OEEquivHash requires BB_HASHSERVE to be set")
        self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
        if not self.method:
            bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")

# Insert these classes into siggen's namespace so it can see and select them
bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
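
# BitBake selects the generator whose "name" matches BB_SIGNATURE_HANDLER,
# e.g.:
#
#   BB_SIGNATURE_HANDLER ?= "OEEquivHash"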

def find_siginfo(pn, taskname, taskhashlist, d):
    """ Find signature data files for comparison purposes """

    import glob

    if not taskname:
        # We have to derive pn and taskname
        key = pn
        splitit = key.split('.bb:')
        taskname = splitit[1]
        pn = os.path.basename(splitit[0]).split('_')[0]
        if key.startswith('virtual:native:'):
            pn = pn + '-native'

    hashfiles = {}
    filedates = {}

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]
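
    # The two filename shapes handled above, illustratively:
    #   sstate siginfo: ...:<taskhash>_populate_sysroot.tgz.siginfo
    #       -> the hash sits between the final ':' and the next '_'
    #   stamp sigdata:  stamp.do_compile.sigdata.<taskhash>
    #       -> the hash is everything after the final '.'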

    # First search in stamps dir
    localdata = d.createCopy()
    localdata.setVar('MULTIMACH_TARGET_SYS', '*')
    localdata.setVar('PN', pn)
    localdata.setVar('PV', '*')
    localdata.setVar('PR', '*')
    localdata.setVar('EXTENDPE', '')
    stamp = localdata.getVar('STAMP')
    if pn.startswith("gcc-source"):
        # gcc-source shared workdir is a special case :(
        stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")

    filespec = '%s.%s.sigdata.*' % (stamp, taskname)
    foundall = False

    for fullpath in glob.glob(filespec):
        if taskhashlist:
            for taskhash in taskhashlist:
                if fullpath.endswith('.%s' % taskhash):
                    hashfiles[taskhash] = fullpath
                    if len(hashfiles) == len(taskhashlist):
                        foundall = True
                        break
        else:
            try:
                filedates[fullpath] = os.stat(fullpath).st_mtime
            except OSError:
                continue
            hashval = get_hashval(fullpath)
            hashfiles[hashval] = fullpath

    if not taskhashlist or (len(filedates) < 2 and not foundall):
        # That didn't work, look in sstate-cache
        hashes = taskhashlist or ['?' * 64]
        localdata = bb.data.createCopy(d)
        for hashval in hashes:
            localdata.setVar('PACKAGE_ARCH', '*')
            localdata.setVar('TARGET_VENDOR', '*')
            localdata.setVar('TARGET_OS', '*')
            localdata.setVar('PN', pn)
            localdata.setVar('PV', '*')
            localdata.setVar('PR', '*')
            localdata.setVar('BB_TASKHASH', hashval)
            swspec = localdata.getVar('SSTATE_SWSPEC')
            if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
                localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
            elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
                localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
            sstatename = taskname[3:]
            filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)

            matchedfiles = glob.glob(filespec)
            for fullpath in matchedfiles:
                actual_hashval = get_hashval(fullpath)
                if actual_hashval in hashfiles:
                    continue
                hashfiles[hashval] = fullpath
                if not taskhashlist:
                    try:
                        filedates[fullpath] = os.stat(fullpath).st_mtime
                    except OSError:
                        continue

    if taskhashlist:
        return hashfiles
    else:
        return filedates

bb.siggen.find_siginfo = find_siginfo
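
# Exporting find_siginfo via bb.siggen lets tools such as bitbake-diffsigs
# and bitbake-dumpsig locate sigdata/siginfo files for comparison.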

def sstate_get_manifest_filename(task, d):
    """
    Return the sstate manifest file path for a particular task.
    Also returns the datastore that can be used to query related variables.
    """
    d2 = d.createCopy()
    extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info')
    if extrainf:
        d2.setVar("SSTATE_MANMACH", extrainf)
    return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
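
# For example, for the zlib recipe and task "populate_sysroot" the returned
# path expands to something like (illustrative):
#
#   ${TMPDIR}/sstate-control/manifest-qemux86_64-zlib.populate_sysroot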

def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
    d2 = d
    variant = ''
    curr_variant = ''
    if d.getVar("BBEXTENDCURR") == "multilib":
        curr_variant = d.getVar("BBEXTENDVARIANT")
        if "virtclass-multilib" not in d.getVar("OVERRIDES"):
            curr_variant = "invalid"
    if taskdata2.startswith("virtual:multilib"):
        variant = taskdata2.split(":")[2]
    if curr_variant != variant:
        if variant not in multilibcache:
            multilibcache[variant] = oe.utils.get_multilib_datastore(variant, d)
        d2 = multilibcache[variant]

    if taskdata.endswith("-native"):
        pkgarchs = ["${BUILD_ARCH}"]
    elif taskdata.startswith("nativesdk-"):
        pkgarchs = ["${SDK_ARCH}_${SDK_OS}", "allarch"]
    elif "-cross-canadian" in taskdata:
        pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
    elif "-cross-" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"]
    elif "-crosssdk" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
    else:
        pkgarchs = ['${MACHINE_ARCH}']
        pkgarchs = pkgarchs + list(reversed(d2.getVar("PACKAGE_EXTRA_ARCHS").split()))
        pkgarchs.append('allarch')
        pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')

    for pkgarch in pkgarchs:
        manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
        if os.path.exists(manifest):
            return manifest, d2
    bb.warn("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
    return None, d2

def OEOuthashBasic(path, sigfile, task, d):
    """
    Basic output hash function

    Calculates the output hash of a task by hashing all output file metadata,
    and file contents.
    """
    import hashlib
    import stat
    import pwd
    import grp

    def update_hash(s):
        s = s.encode('utf-8')
        h.update(s)
        if sigfile:
            sigfile.write(s)

    h = hashlib.sha256()
    prev_dir = os.getcwd()
    include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
    if "package_write_" in task or task == "package_qa":
        include_owners = False
    extra_content = d.getVar('HASHEQUIV_HASH_VERSION')

    try:
        os.chdir(path)

        update_hash("OEOuthashBasic\n")
        if extra_content:
            update_hash(extra_content + "\n")

        # It is only currently useful to get equivalent hashes for things that
        # can be restored from sstate. Since the sstate object is named using
        # SSTATE_PKGSPEC and the task name, those should be included in the
        # output hash calculation.
        update_hash("SSTATE_PKGSPEC=%s\n" % d.getVar('SSTATE_PKGSPEC'))
        update_hash("task=%s\n" % task)

        for root, dirs, files in os.walk('.', topdown=True):
            # Sort directories to ensure consistent ordering when recursing
            dirs.sort()
            files.sort()
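
            # process() emits one "ls -l"-style record per filesystem entry:
            # a type character and rwx/sticky permission bits, optionally
            # owner and group names, device numbers for block/char devices,
            # size and a sha256 of the contents for regular files, then the
            # path (plus " -> target" for symlinks).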
            def process(path):
                s = os.lstat(path)

                if stat.S_ISDIR(s.st_mode):
                    update_hash('d')
                elif stat.S_ISCHR(s.st_mode):
                    update_hash('c')
                elif stat.S_ISBLK(s.st_mode):
                    update_hash('b')
                elif stat.S_ISSOCK(s.st_mode):
                    update_hash('s')
                elif stat.S_ISLNK(s.st_mode):
                    update_hash('l')
                elif stat.S_ISFIFO(s.st_mode):
                    update_hash('p')
                else:
                    update_hash('-')

                def add_perm(mask, on, off='-'):
                    if mask & s.st_mode:
                        update_hash(on)
                    else:
                        update_hash(off)

                add_perm(stat.S_IRUSR, 'r')
                add_perm(stat.S_IWUSR, 'w')
                if stat.S_ISUID & s.st_mode:
                    add_perm(stat.S_IXUSR, 's', 'S')
                else:
                    add_perm(stat.S_IXUSR, 'x')

                add_perm(stat.S_IRGRP, 'r')
                add_perm(stat.S_IWGRP, 'w')
                if stat.S_ISGID & s.st_mode:
                    add_perm(stat.S_IXGRP, 's', 'S')
                else:
                    add_perm(stat.S_IXGRP, 'x')

                add_perm(stat.S_IROTH, 'r')
                add_perm(stat.S_IWOTH, 'w')
                if stat.S_ISVTX & s.st_mode:
                    update_hash('t')
                else:
                    add_perm(stat.S_IXOTH, 'x')

                if include_owners:
                    try:
                        update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
                        update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
                    except KeyError:
                        bb.warn("KeyError in %s" % path)
                        raise

                update_hash(" ")
                if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
                    update_hash("%9s" % ("%d.%d" % (os.major(s.st_rdev), os.minor(s.st_rdev))))
                else:
                    update_hash(" " * 9)

                update_hash(" ")
                if stat.S_ISREG(s.st_mode):
                    update_hash("%10d" % s.st_size)
                else:
                    update_hash(" " * 10)

                update_hash(" ")
                fh = hashlib.sha256()
                if stat.S_ISREG(s.st_mode):
                    # Hash file contents (fobj avoids shadowing the datastore d)
                    with open(path, 'rb') as fobj:
                        for chunk in iter(lambda: fobj.read(4096), b""):
                            fh.update(chunk)
                    update_hash(fh.hexdigest())
                else:
                    update_hash(" " * len(fh.hexdigest()))

                update_hash(" %s" % path)
                if stat.S_ISLNK(s.st_mode):
                    update_hash(" -> %s" % os.readlink(path))

                update_hash("\n")

            # Process this directory and all its child files
            process(root)
            for f in files:
                if f == 'fixmepath':
                    continue
                process(os.path.join(root, f))
    finally:
        os.chdir(prev_dir)

    return h.hexdigest()