#
# SPDX-License-Identifier: GPL-2.0-only
#
import os

import bb.siggen
import bb.runqueue
import oe

def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
    # Return True if we should keep the dependency, False to drop it
    def isNative(x):
        return x.endswith("-native")
    def isCross(x):
        return "-cross-" in x
    def isNativeSDK(x):
        return x.startswith("nativesdk-")
    def isKernel(mc, fn):
        inherits = " ".join(dataCaches[mc].inherits[fn])
        return inherits.find("/module-base.bbclass") != -1 or inherits.find("/linux-kernel-base.bbclass") != -1
    def isPackageGroup(mc, fn):
        inherits = " ".join(dataCaches[mc].inherits[fn])
        return "/packagegroup.bbclass" in inherits
    def isAllArch(mc, fn):
        inherits = " ".join(dataCaches[mc].inherits[fn])
        return "/allarch.bbclass" in inherits
    def isImage(mc, fn):
        return "/image.bbclass" in " ".join(dataCaches[mc].inherits[fn])

    depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep)
    mc, _ = bb.runqueue.split_mc(fn)

    # (Almost) always include our own inter-task dependencies (unless they come
    # from a mcdepends). The exception is the special
    # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass.
    if recipename == depname and depmc == mc:
        if task == "do_kernel_configme" and deptaskname == "do_unpack_and_patch":
            return False
        return True

    # Exclude well defined recipe->dependency pairs
    if "%s->%s" % (recipename, depname) in siggen.saferecipedeps:
        return False

    # Check for the special wildcard form
    if "*->%s" % depname in siggen.saferecipedeps and recipename != depname:
        return False

    # Don't change native/cross/nativesdk recipe dependencies any further
    if isNative(recipename) or isCross(recipename) or isNativeSDK(recipename):
        return True

    # Only target packages beyond here

    # allarch packagegroups are assumed to have well behaved names which don't change between architectures/tunes
    if isPackageGroup(mc, fn) and isAllArch(mc, fn) and not isNative(depname):
        return False

    # Exclude well defined machine specific configurations which don't change the ABI
    if depname in siggen.abisaferecipes and not isImage(mc, fn):
        return False

    # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
    # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum
    # is machine specific.
    # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
    # and we recommend a kernel-module, we exclude the dependency.
    if dataCaches and isKernel(depmc, depmcfn) and not isKernel(mc, fn):
        for pkg in dataCaches[mc].runrecs[fn]:
            if " ".join(dataCaches[mc].runrecs[fn][pkg]).find("kernel-module-") != -1:
                return False

    # Default to keeping dependencies
    return True
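
# Illustrative examples (hypothetical recipe names, not from this file) of the
# exclusion variables consumed by sstate_rundepfilter() above. Both are
# space-separated lists: SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS holds literal
# "recipe->dep" pairs ('*' allowed on the recipe side), while
# SIGGEN_EXCLUDERECIPES_ABISAFE holds bare dependency names:
#   SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS = "foo-native->bar *->baz"
#   SIGGEN_EXCLUDERECIPES_ABISAFE = "some-abi-safe-recipe"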

def sstate_lockedsigs(d):
    sigs = {}
    types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split()
    for t in types:
        siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t
        lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split()
        for ls in lockedsigs:
            pn, task, h = ls.split(":", 2)
            if pn not in sigs:
                sigs[pn] = {}
            sigs[pn][task] = [h, siggen_lockedsigs_var]
    return sigs
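
# Illustrative sketch of the input this parser expects (recipe name, type and
# hash are hypothetical). Each entry is "pn:task:hash", per the split above:
#   SIGGEN_LOCKEDSIGS_TYPES = "t-core2-64"
#   SIGGEN_LOCKEDSIGS_t-core2-64 = "zlib:do_configure:<hash>"
# yields sigs["zlib"]["do_configure"] == ["<hash>", "SIGGEN_LOCKEDSIGS_t-core2-64"]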

class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
    name = "OEBasic"
    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)

class SignatureGeneratorOEBasicHashMixIn(object):
    supports_multiconfig_datacaches = True

    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
        self.lockedsigs = sstate_lockedsigs(data)
        self.lockedhashes = {}
        self.lockedpnmap = {}
        self.lockedhashfn = {}
        self.machine = data.getVar("MACHINE")
        self.mismatch_msgs = []
        self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or "").split()
        self.unlockedrecipes = {k: "" for k in self.unlockedrecipes}
        self.buildarch = data.getVar('BUILD_ARCH')
        self._internal = False

    def tasks_resolved(self, virtmap, virtpnmap, dataCache):
        # Translate virtual/xxx entries to PN values
        newabisafe = []
        for a in self.abisaferecipes:
            if a in virtpnmap:
                newabisafe.append(virtpnmap[a])
            else:
                newabisafe.append(a)
        self.abisaferecipes = newabisafe
        newsafedeps = []
        for a in self.saferecipedeps:
            a1, a2 = a.split("->")
            if a1 in virtpnmap:
                a1 = virtpnmap[a1]
            if a2 in virtpnmap:
                a2 = virtpnmap[a2]
            newsafedeps.append(a1 + "->" + a2)
        self.saferecipedeps = newsafedeps

    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)

    def get_taskdata(self):
        return (self.lockedpnmap, self.lockedhashfn, self.lockedhashes) + super().get_taskdata()

    def set_taskdata(self, data):
        self.lockedpnmap, self.lockedhashfn, self.lockedhashes = data[:3]
        super().set_taskdata(data[3:])

    def dump_sigs(self, dataCache, options):
        sigfile = os.getcwd() + "/locked-sigs.inc"
        bb.plain("Writing locked sigs to %s" % sigfile)
        self.dump_lockedsigs(sigfile)
        return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)

    def prep_taskhash(self, tid, deps, dataCaches):
        super().prep_taskhash(tid, deps, dataCaches)
        if hasattr(self, "extramethod"):
            (mc, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
            inherits = " ".join(dataCaches[mc].inherits[fn])
            if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
                self.extramethod[tid] = ":" + self.buildarch

    def get_taskhash(self, tid, deps, dataCaches):
        if tid in self.lockedhashes:
            if self.lockedhashes[tid]:
                return self.lockedhashes[tid]
            else:
                return super().get_taskhash(tid, deps, dataCaches)

        h = super().get_taskhash(tid, deps, dataCaches)

        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)

        recipename = dataCaches[mc].pkg_fn[fn]
        self.lockedpnmap[fn] = recipename
        self.lockedhashfn[fn] = dataCaches[mc].hashfn[fn]

        unlocked = False
        if recipename in self.unlockedrecipes:
            unlocked = True
        else:
            def recipename_from_dep(dep):
                (depmc, _, _, depfn) = bb.runqueue.split_tid_mcfn(dep)
                return dataCaches[depmc].pkg_fn[depfn]

            # If any unlocked recipe is in the direct dependencies then the
            # current recipe should be unlocked as well.
            depnames = [recipename_from_dep(x) for x in deps if mc == bb.runqueue.mc_from_tid(x)]
            if any(x in y for y in depnames for x in self.unlockedrecipes):
                self.unlockedrecipes[recipename] = ''
                unlocked = True

        if not unlocked and recipename in self.lockedsigs:
            if task in self.lockedsigs[recipename]:
                h_locked = self.lockedsigs[recipename][task][0]
                var = self.lockedsigs[recipename][task][1]
                self.lockedhashes[tid] = h_locked
                self._internal = True
                unihash = self.get_unihash(tid)
                self._internal = False
                #bb.warn("Using %s %s %s" % (recipename, task, h))

                if h != h_locked and h_locked != unihash:
                    self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
                                              % (recipename, task, h, h_locked, var))

                return h_locked

        self.lockedhashes[tid] = False
        #bb.warn("%s %s %s" % (recipename, task, h))
        return h

    def get_stampfile_hash(self, tid):
        if tid in self.lockedhashes and self.lockedhashes[tid]:
            return self.lockedhashes[tid]
        return super().get_stampfile_hash(tid)

    def get_unihash(self, tid):
        if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
            return self.lockedhashes[tid]
        return super().get_unihash(tid)

    def dump_sigtask(self, fn, task, stampbase, runtime):
        tid = fn + ":" + task
        if tid in self.lockedhashes and self.lockedhashes[tid]:
            return
        super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)

    def dump_lockedsigs(self, sigfile, taskfilter=None):
        types = {}
        for tid in self.runtaskdeps:
            if taskfilter:
                if tid not in taskfilter:
                    continue
            fn = bb.runqueue.fn_from_tid(tid)
            t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
            t = 't-' + t.replace('_', '-')
            if t not in types:
                types[t] = []
            types[t].append(tid)

        with open(sigfile, "w") as f:
            l = sorted(types)
            for t in l:
                f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
                types[t].sort()
                sortedtid = sorted(types[t], key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
                for tid in sortedtid:
                    (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
                    if tid not in self.taskhash:
                        continue
                    f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
                f.write('    "\n')
            f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))
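
    # Illustrative shape of the locked-sigs.inc written above (type, machine,
    # recipe and hash values are hypothetical):
    #   SIGGEN_LOCKEDSIGS_t-core2-64 = "\
    #       zlib:do_fetch:<unihash> \
    #       "
    #   SIGGEN_LOCKEDSIGS_TYPES_qemux86-64 = "t-core2-64"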

    def dump_siglist(self, sigfile):
        with open(sigfile, "w") as f:
            tasks = []
            for taskitem in self.taskhash:
                (fn, task) = taskitem.rsplit(":", 1)
                pn = self.lockedpnmap[fn]
                tasks.append((pn, task, fn, self.taskhash[taskitem]))
            for (pn, task, fn, taskhash) in sorted(tasks):
                f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))

    def checkhashes(self, sq_data, missed, found, d):
        warn_msgs = []
        error_msgs = []
        sstate_missing_msgs = []

        for tid in sq_data['hash']:
            if tid not in found:
                for pn in self.lockedsigs:
                    taskname = bb.runqueue.taskname_from_tid(tid)
                    if sq_data['hash'][tid] in iter(self.lockedsigs[pn].values()):
                        if taskname == 'do_shared_workdir':
                            continue
                        sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
                                                   % (pn, taskname, sq_data['hash'][tid]))

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
        if checklevel == 'warn':
            warn_msgs += self.mismatch_msgs
        elif checklevel == 'error':
            error_msgs += self.mismatch_msgs

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK")
        if checklevel == 'warn':
            warn_msgs += sstate_missing_msgs
        elif checklevel == 'error':
            error_msgs += sstate_missing_msgs

        if warn_msgs:
            bb.warn("\n".join(warn_msgs))
        if error_msgs:
            bb.fatal("\n".join(error_msgs))
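
    # Illustrative configuration for the two checks above; any value other
    # than 'warn' or 'error' disables the corresponding report:
    #   SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "error"
    #   SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK = "warn"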

class SignatureGeneratorOEBasicHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
    name = "OEBasicHash"

class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorUniHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
    name = "OEEquivHash"

    def init_rundepcheck(self, data):
        super().init_rundepcheck(data)
        self.server = data.getVar('BB_HASHSERVE')
        if not self.server:
            bb.fatal("OEEquivHash requires BB_HASHSERVE to be set")
        self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
        if not self.method:
            bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")

# Insert these classes into siggen's namespace so it can see and select them
bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash

def find_siginfo(pn, taskname, taskhashlist, d):
    """ Find signature data files for comparison purposes """

    import fnmatch
    import glob

    if not taskname:
        # We have to derive pn and taskname
        key = pn
        splitit = key.split('.bb:')
        taskname = splitit[1]
        pn = os.path.basename(splitit[0]).split('_')[0]
        if key.startswith('virtual:native:'):
            pn = pn + '-native'

    hashfiles = {}
    filedates = {}

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]
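
    # Illustrative filename shapes matched by get_hashval() above (all path
    # components hypothetical):
    #   stamp file:    <stamp>.do_compile.sigdata.<hash>          -> text after the last '.'
    #   sstate file:   sstate:zlib:...:<hash>_deploy.tgz.siginfo  -> text after the last ':', up to '_'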

    # First search in stamps dir
    localdata = d.createCopy()
    localdata.setVar('MULTIMACH_TARGET_SYS', '*')
    localdata.setVar('PN', pn)
    localdata.setVar('PV', '*')
    localdata.setVar('PR', '*')
    localdata.setVar('EXTENDPE', '')
    stamp = localdata.getVar('STAMP')
    if pn.startswith("gcc-source"):
        # gcc-source shared workdir is a special case :(
        stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")

    filespec = '%s.%s.sigdata.*' % (stamp, taskname)
    foundall = False
    for fullpath in glob.glob(filespec):
        if taskhashlist:
            for taskhash in taskhashlist:
                if fullpath.endswith('.%s' % taskhash):
                    hashfiles[taskhash] = fullpath
                    if len(hashfiles) == len(taskhashlist):
                        foundall = True
                        break
        else:
            try:
                filedates[fullpath] = os.stat(fullpath).st_mtime
            except OSError:
                continue
            hashval = get_hashval(fullpath)
            hashfiles[hashval] = fullpath

    if not taskhashlist or (len(filedates) < 2 and not foundall):
        # That didn't work, look in sstate-cache
        hashes = taskhashlist or ['?' * 64]
        localdata = bb.data.createCopy(d)
        for hashval in hashes:
            localdata.setVar('PACKAGE_ARCH', '*')
            localdata.setVar('TARGET_VENDOR', '*')
            localdata.setVar('TARGET_OS', '*')
            localdata.setVar('PN', pn)
            localdata.setVar('PV', '*')
            localdata.setVar('PR', '*')
            localdata.setVar('BB_TASKHASH', hashval)
            swspec = localdata.getVar('SSTATE_SWSPEC')
            if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
                localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
            elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
                localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
            sstatename = taskname[3:]
            filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)

            matchedfiles = glob.glob(filespec)
            for fullpath in matchedfiles:
                actual_hashval = get_hashval(fullpath)
                if actual_hashval in hashfiles:
                    continue
                hashfiles[hashval] = fullpath
                if not taskhashlist:
                    try:
                        filedates[fullpath] = os.stat(fullpath).st_mtime
                    except OSError:
                        continue

    if taskhashlist:
        return hashfiles
    else:
        return filedates

bb.siggen.find_siginfo = find_siginfo

def sstate_get_manifest_filename(task, d):
    """
    Return the sstate manifest file path for a particular task.
    Also returns the datastore that can be used to query related variables.
    """
    d2 = d.createCopy()
    extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info')
    if extrainf:
        d2.setVar("SSTATE_MANMACH", extrainf)
    return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
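
# Minimal usage sketch (illustrative; the task name is an example):
#   manifest, d2 = sstate_get_manifest_filename("populate_sysroot", d)
# "manifest" is ${SSTATE_MANFILEPREFIX}.populate_sysroot after expansion, and
# d2 is the datastore that was used for that expansion.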

def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
    d2 = d
    variant = ''
    curr_variant = ''
    if d.getVar("BBEXTENDCURR") == "multilib":
        curr_variant = d.getVar("BBEXTENDVARIANT")
        if "virtclass-multilib" not in d.getVar("OVERRIDES"):
            curr_variant = "invalid"
    if taskdata2.startswith("virtual:multilib"):
        variant = taskdata2.split(":")[2]
    if curr_variant != variant:
        if variant not in multilibcache:
            multilibcache[variant] = oe.utils.get_multilib_datastore(variant, d)
        d2 = multilibcache[variant]

    if taskdata.endswith("-native"):
        pkgarchs = ["${BUILD_ARCH}"]
    elif taskdata.startswith("nativesdk-"):
        pkgarchs = ["${SDK_ARCH}_${SDK_OS}", "allarch"]
    elif "-cross-canadian" in taskdata:
        pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
    elif "-cross-" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"]
    elif "-crosssdk" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
    else:
        pkgarchs = ['${MACHINE_ARCH}']
        pkgarchs = pkgarchs + list(reversed(d2.getVar("PACKAGE_EXTRA_ARCHS").split()))
        pkgarchs.append('allarch')
        pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')

    for pkgarch in pkgarchs:
        manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
        if os.path.exists(manifest):
            return manifest, d2
    bb.warn("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
    return None, d2
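
# Illustrative search order for a plain target recipe (architecture values are
# expanded from the datastore): the loop above probes manifest-${MACHINE_ARCH}-...
# first, then each PACKAGE_EXTRA_ARCHS entry in reverse order, then "allarch",
# and finally the ${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX} fallback.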

def OEOuthashBasic(path, sigfile, task, d):
    """
    Basic output hash function

    Calculates the output hash of a task by hashing all output file metadata,
    and file contents.
    """
    import hashlib
    import stat
    import pwd
    import grp
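
    # Each filesystem entry processed below contributes one ls(1)-style record
    # to the hash; roughly, with field widths fixed by the format strings:
    #   <type+perms> [owner group] [mtime] <major.minor|blank> <size|blank> <content sha256|blank> <path> [-> link target]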

    def update_hash(s):
        s = s.encode('utf-8')
        h.update(s)
        if sigfile:
            sigfile.write(s)

    h = hashlib.sha256()
    prev_dir = os.getcwd()
    include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
    if "package_write_" in task or task == "package_qa":
        include_owners = False
    include_timestamps = False
    if task == "package":
        include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1'
    extra_content = d.getVar('HASHEQUIV_HASH_VERSION')

    try:
        os.chdir(path)

        update_hash("OEOuthashBasic\n")
        if extra_content:
            update_hash(extra_content + "\n")

        # It is only currently useful to get equivalent hashes for things that
        # can be restored from sstate. Since the sstate object is named using
        # SSTATE_PKGSPEC and the task name, those should be included in the
        # output hash calculation.
        update_hash("SSTATE_PKGSPEC=%s\n" % d.getVar('SSTATE_PKGSPEC'))
        update_hash("task=%s\n" % task)

        for root, dirs, files in os.walk('.', topdown=True):
            # Sort directories to ensure consistent ordering when recursing
            dirs.sort()
            files.sort()

            def process(path):
                s = os.lstat(path)

                if stat.S_ISDIR(s.st_mode):
                    update_hash('d')
                elif stat.S_ISCHR(s.st_mode):
                    update_hash('c')
                elif stat.S_ISBLK(s.st_mode):
                    update_hash('b')
                elif stat.S_ISSOCK(s.st_mode):
                    update_hash('s')
                elif stat.S_ISLNK(s.st_mode):
                    update_hash('l')
                elif stat.S_ISFIFO(s.st_mode):
                    update_hash('p')
                else:
                    update_hash('-')

                def add_perm(mask, on, off='-'):
                    if mask & s.st_mode:
                        update_hash(on)
                    else:
                        update_hash(off)

                add_perm(stat.S_IRUSR, 'r')
                add_perm(stat.S_IWUSR, 'w')
                if stat.S_ISUID & s.st_mode:
                    add_perm(stat.S_IXUSR, 's', 'S')
                else:
                    add_perm(stat.S_IXUSR, 'x')

                add_perm(stat.S_IRGRP, 'r')
                add_perm(stat.S_IWGRP, 'w')
                if stat.S_ISGID & s.st_mode:
                    add_perm(stat.S_IXGRP, 's', 'S')
                else:
                    add_perm(stat.S_IXGRP, 'x')

                add_perm(stat.S_IROTH, 'r')
                add_perm(stat.S_IWOTH, 'w')
                if stat.S_ISVTX & s.st_mode:
                    update_hash('t')
                else:
                    add_perm(stat.S_IXOTH, 'x')

                if include_owners:
                    try:
                        update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
                        update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
                    except KeyError:
                        bb.warn("KeyError in %s" % path)
                        raise

                if include_timestamps:
                    update_hash(" %10d" % s.st_mtime)

                update_hash(" ")
                if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
                    update_hash("%9s" % ("%d.%d" % (os.major(s.st_rdev), os.minor(s.st_rdev))))
                else:
                    update_hash(" " * 9)

                update_hash(" ")
                if stat.S_ISREG(s.st_mode):
                    update_hash("%10d" % s.st_size)
                else:
                    update_hash(" " * 10)

                update_hash(" ")
                fh = hashlib.sha256()
                if stat.S_ISREG(s.st_mode):
                    # Hash file contents; use a name other than "d" for the
                    # file handle to avoid shadowing the datastore argument
                    with open(path, 'rb') as fobj:
                        for chunk in iter(lambda: fobj.read(4096), b""):
                            fh.update(chunk)
                    update_hash(fh.hexdigest())
                else:
                    update_hash(" " * len(fh.hexdigest()))

                update_hash(" %s" % path)

                if stat.S_ISLNK(s.st_mode):
                    update_hash(" -> %s" % os.readlink(path))

                update_hash("\n")

            # Process this directory and all its child files
            process(root)
            for f in files:
                if f == 'fixmepath':
                    continue
                process(os.path.join(root, f))
    finally:
        os.chdir(prev_dir)

    return h.hexdigest()
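
# Minimal usage sketch (illustrative; the directory is hypothetical). sigfile
# receives the raw bytes fed to the hash, so it must be opened in binary mode;
# passing None skips writing the transcript:
#   outhash = OEOuthashBasic(d.expand("${WORKDIR}/sstate-build-package"), None, "package", d)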