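"""
BitBake task signature (hash) generation.

A signature generator computes, for each task, a "basehash" derived from the
values of the variables the task depends on, and a "taskhash" that
additionally folds in the hashes of dependent tasks, file checksums and any
taints. The generator implementation is selected through the
BB_SIGNATURE_HANDLER variable (see init() below).
"""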

import hashlib
import logging
import os
import re
import tempfile
import pickle
import bb.data
import difflib
from bb.checksum import FileChecksumCache

logger = logging.getLogger('BitBake.SigGen')

def init(d):
    siggens = [obj for obj in globals().values()
               if type(obj) is type and issubclass(obj, SignatureGenerator)]

    desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop"
    for sg in siggens:
        if desired == sg.name:
            return sg(d)
    logger.error("Invalid signature generator '%s', using default 'noop'\n"
                 "Available generators: %s", desired,
                 ', '.join(obj.name for obj in siggens))
    return SignatureGenerator(d)
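
# Minimal usage sketch (illustrative only, not part of the module): given a
# parsed datastore `d` from BitBake's normal startup, a generator is obtained
# and used roughly as:
#
#   d.setVar("BB_SIGNATURE_HANDLER", "basichash")
#   siggen = init(d)
#   stamp = siggen.stampfile(stampbase, fn, "do_compile", "")
#
# `stampbase` and `fn` are assumed to come from the cache/runqueue layers;
# this module never constructs them itself.
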
class SignatureGenerator(object):
    """
    A "noop" signature generator: tracks no dependencies and returns a
    constant hash, effectively disabling signature handling.
    """
    name = "noop"

    def __init__(self, data):
        self.basehash = {}
        self.taskhash = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}

    def finalise(self, fn, d, variant):
        return

    def get_taskhash(self, fn, task, deps, dataCache):
        return "0"

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        return

    def stampfile(self, stampbase, file_name, taskname, extrainfo):
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

    def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

    def dump_sigtask(self, fn, task, stampbase, runtime):
        return

    def invalidate_task(self, task, d, fn):
        bb.build.del_stamp(task, d, fn)

    def dump_sigs(self, dataCache, options):
        return

    def get_taskdata(self):
        return (self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash)

    def set_taskdata(self, data):
        self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash = data

class SignatureGeneratorBasic(SignatureGenerator):
    """
    Signature generator based on the hashes of the variables each task
    depends on, plus the hashes of the tasks it depends upon.
    """
    name = "basic"

    def __init__(self, data):
        self.basehash = {}
        self.taskhash = {}
        self.taskdeps = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}
        self.gendeps = {}
        self.lookupcache = {}
        self.pkgnameextract = re.compile(r"(?P<fn>.*)\..*")
        self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split())
        self.taskwhitelist = None
        self.init_rundepcheck(data)
        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
        if checksum_cache_file:
            self.checksum_cache = FileChecksumCache()
            self.checksum_cache.init_cache(data, checksum_cache_file)
        else:
            self.checksum_cache = None

    def init_rundepcheck(self, data):
        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None
        if self.taskwhitelist:
            self.twl = re.compile(self.taskwhitelist)
        else:
            self.twl = None
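
    # _build_data() computes the per-task base hashes for one recipe file:
    # starting from bb.data.generate_dependencies(), it takes the transitive
    # closure of each task's variable dependencies (minus the whitelist) and
    # hashes the concatenation of the variable names and their values.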
    def _build_data(self, fn, d):

        ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
        tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d)

        taskdeps = {}

        for task in tasklist:
            data = lookupcache[task]

            if data is None:
                bb.error("Task %s from %s seems to be empty?!" % (task, fn))
                data = ''

            gendeps[task] -= self.basewhitelist
            newdeps = gendeps[task]
            seen = set()
            while newdeps:
                nextdeps = newdeps
                seen |= nextdeps
                newdeps = set()
                for dep in nextdeps:
                    if dep in self.basewhitelist:
                        continue
                    gendeps[dep] -= self.basewhitelist
                    newdeps |= gendeps[dep]
                newdeps -= seen

            alldeps = sorted(seen)
            for dep in alldeps:
                data = data + dep
                var = lookupcache[dep]
                if var is not None:
                    data = data + str(var)
            datahash = hashlib.md5(data.encode("utf-8")).hexdigest()
            k = fn + "." + task
            if not ignore_mismatch and k in self.basehash and self.basehash[k] != datahash:
                bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (k, self.basehash[k], datahash))
            self.basehash[k] = datahash
            taskdeps[task] = alldeps

        self.taskdeps[fn] = taskdeps
        self.gendeps[fn] = gendeps
        self.lookupcache[fn] = lookupcache

        return taskdeps

    def finalise(self, fn, d, variant):

        mc = d.getVar("__BBMULTICONFIG", False) or ""
        if variant or mc:
            fn = bb.cache.realfn2virtual(fn, variant, mc)

        try:
            taskdeps = self._build_data(fn, d)
        except:
            bb.warn("Error during finalise of %s" % fn)
            raise

        #Slow but can be useful for debugging mismatched basehashes
        #for task in self.taskdeps[fn]:
        #    self.dump_sigtask(fn, task, d.getVar("STAMP"), False)

        for task in taskdeps:
            d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
        # Return True if we should keep the dependency, False to drop it
        # We only manipulate the dependencies for packages not in the whitelist
        if self.twl and not self.twl.search(recipename):
            # then process the actual dependencies
            if self.twl.search(depname):
                return False
        return True

    def read_taint(self, fn, task, stampbase):
        taint = None
        try:
            with open(stampbase + '.' + task + '.taint', 'r') as taintf:
                taint = taintf.read()
        except IOError:
            pass
        return taint
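
    # The taskhash extends the basehash with the taskhashes of all runtime
    # dependencies (in sorted, cleaned order), the checksums of any files the
    # task declares, and any taint (nostamp or forced rerun), so a change in
    # any of these inputs changes the hashes of all dependent tasks.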
    def get_taskhash(self, fn, task, deps, dataCache):
        k = fn + "." + task
        data = dataCache.basetaskhash[k]
        self.basehash[k] = data
        self.runtaskdeps[k] = []
        self.file_checksum_values[k] = []
        recipename = dataCache.pkg_fn[fn]

        for dep in sorted(deps, key=clean_basepath):
            depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
            if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
                continue
            if dep not in self.taskhash:
                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
            data = data + self.taskhash[dep]
            self.runtaskdeps[k].append(dep)

        if task in dataCache.file_checksums[fn]:
            if self.checksum_cache:
                checksums = self.checksum_cache.get_checksums(dataCache.file_checksums[fn][task], recipename)
            else:
                checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
            for (f, cs) in checksums:
                self.file_checksum_values[k].append((f, cs))
                if cs:
                    data = data + cs

        taskdep = dataCache.task_deps[fn]
        if 'nostamp' in taskdep and task in taskdep['nostamp']:
            # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
            import uuid
            taint = str(uuid.uuid4())
            data = data + taint
            self.taints[k] = "nostamp:" + taint

        taint = self.read_taint(fn, task, dataCache.stamp[fn])
        if taint:
            data = data + taint
            self.taints[k] = taint
            logger.warning("%s is tainted from a forced run" % k)

        h = hashlib.md5(data.encode("utf-8")).hexdigest()
        self.taskhash[k] = h
        #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
        return h

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        if self.checksum_cache:
            self.checksum_cache.save_extras()
            self.checksum_cache.save_merge()
        else:
            bb.fetch2.fetcher_parse_save()
            bb.fetch2.fetcher_parse_done()
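
    # dump_sigtask() writes a pickled "sigdata" (or "sigbasedata") snapshot
    # of every hash input next to the stamp files, going through a temporary
    # file plus os.rename() so a partially written file is never left behind.
    # The stored hashes are re-verified via calc_basehash()/calc_taskhash().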
    def dump_sigtask(self, fn, task, stampbase, runtime):

        k = fn + "." + task
        referencestamp = stampbase
        if isinstance(runtime, str) and runtime.startswith("customfile"):
            sigfile = stampbase
            referencestamp = runtime[11:]
        elif runtime and k in self.taskhash:
            sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
        else:
            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]

        bb.utils.mkdirhier(os.path.dirname(sigfile))

        data = {}
        data['task'] = task
        data['basewhitelist'] = self.basewhitelist
        data['taskwhitelist'] = self.taskwhitelist
        data['taskdeps'] = self.taskdeps[fn][task]
        data['basehash'] = self.basehash[k]
        data['gendeps'] = {}
        data['varvals'] = {}
        data['varvals'][task] = self.lookupcache[fn][task]
        for dep in self.taskdeps[fn][task]:
            if dep in self.basewhitelist:
                continue
            data['gendeps'][dep] = self.gendeps[fn][dep]
            data['varvals'][dep] = self.lookupcache[fn][dep]

        if runtime and k in self.taskhash:
            data['runtaskdeps'] = self.runtaskdeps[k]
            data['file_checksum_values'] = [(os.path.basename(f), cs) for f, cs in self.file_checksum_values[k]]
            data['runtaskhashes'] = {}
            for dep in data['runtaskdeps']:
                data['runtaskhashes'][dep] = self.taskhash[dep]
            data['taskhash'] = self.taskhash[k]

        taint = self.read_taint(fn, task, referencestamp)
        if taint:
            data['taint'] = taint

        if runtime and k in self.taints:
            if 'nostamp:' in self.taints[k]:
                data['taint'] = self.taints[k]

        computed_basehash = calc_basehash(data)
        if computed_basehash != self.basehash[k]:
            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[k], k))
        if runtime and k in self.taskhash:
            computed_taskhash = calc_taskhash(data)
            if computed_taskhash != self.taskhash[k]:
                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[k], k))
                sigfile = sigfile.replace(self.taskhash[k], computed_taskhash)

        fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
        try:
            with os.fdopen(fd, "wb") as stream:
                pickle.dump(data, stream, -1)
                stream.flush()
            os.chmod(tmpfile, 0o664)
            os.rename(tmpfile, sigfile)
        except (OSError, IOError) as err:
            try:
                os.unlink(tmpfile)
            except OSError:
                pass
            raise err

    def dump_sigfn(self, fn, dataCaches, options):
        if fn in self.taskdeps:
            for task in self.taskdeps[fn]:
                tid = fn + ":" + task
                (mc, _, _) = bb.runqueue.split_tid(tid)
                k = fn + "." + task
                if k not in self.taskhash:
                    continue
                if dataCaches[mc].basetaskhash[k] != self.basehash[k]:
                    bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
                    bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[k], self.basehash[k]))
                self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)

class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
    name = "basichash"

    def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
        if taskname != "do_setscene" and taskname.endswith("_setscene"):
            k = fn + "." + taskname[:-9]
        else:
            k = fn + "." + taskname
        if clean:
            h = "*"
        elif k in self.taskhash:
            h = self.taskhash[k]
        else:
            # If k is not in basehash, then error
            h = self.basehash[k]
        return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')

    def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
        return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)

    def invalidate_task(self, task, d, fn):
        bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
        bb.build.write_taint(task, d, fn)

def dump_this_task(outfile, d):
    import bb.parse
    fn = d.getVar("BB_FILENAME")
    task = "do_" + d.getVar("BB_CURRENTTASK")
    referencestamp = bb.build.stamp_internal(task, d, None, True)
    bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
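
# clean_basepath() reduces an absolute recipe path (optionally carrying
# "virtual:" or "multiconfig:" prefixes) to a short "dir/file" form, so that
# signatures and diff output do not depend on the local checkout location.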
def clean_basepath(a):
    mc = None
    if a.startswith("multiconfig:"):
        _, mc, a = a.split(":", 2)
    b = a.rsplit("/", 2)[1] + '/' + a.rsplit("/", 2)[2]
    if a.startswith("virtual:"):
        b = b + ":" + a.rsplit(":", 1)[0]
    if mc:
        b = b + ":multiconfig:" + mc
    return b

def clean_basepaths(a):
    b = {}
    for x in a:
        b[clean_basepath(x)] = a[x]
    return b

def clean_basepaths_list(a):
    b = []
    for x in a:
        b.append(clean_basepath(x))
    return b
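
# compare_sigfiles() loads two pickled signature files and returns a list of
# human-readable lines describing why the hashes differ: whitelist, variable
# value, file checksum, runtime dependency and taint changes. When recursecb
# is callable, changed dependencies are compared recursively; collapsed=True
# trims the output for use within such nested comparisons.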
def compare_sigfiles(a, b, recursecb=None, collapsed=False):
    output = []

    with open(a, 'rb') as f:
        p1 = pickle.Unpickler(f)
        a_data = p1.load()
    with open(b, 'rb') as f:
        p2 = pickle.Unpickler(f)
        b_data = p2.load()

    def dict_diff(a, b, whitelist=set()):
        sa = set(a.keys())
        sb = set(b.keys())
        common = sa & sb
        changed = set()
        for i in common:
            if a[i] != b[i] and i not in whitelist:
                changed.add(i)
        added = sb - sa
        removed = sa - sb
        return changed, added, removed

    def file_checksums_diff(a, b):
        from collections import Counter
        # Handle old siginfo format
        if isinstance(a, dict):
            a = [(os.path.basename(f), cs) for f, cs in a.items()]
        if isinstance(b, dict):
            b = [(os.path.basename(f), cs) for f, cs in b.items()]
        # Compare lists, ensuring we can handle duplicate filenames if they exist
        removedcount = Counter(a)
        removedcount.subtract(b)
        addedcount = Counter(b)
        addedcount.subtract(a)
        added = []
        for x in b:
            if addedcount[x] > 0:
                addedcount[x] -= 1
                added.append(x)
        removed = []
        changed = []
        for x in a:
            if removedcount[x] > 0:
                removedcount[x] -= 1
                for y in added:
                    if y[0] == x[0]:
                        changed.append((x[0], x[1], y[1]))
                        added.remove(y)
                        break
                else:
                    removed.append(x)
        added = [x[0] for x in added]
        removed = [x[0] for x in removed]
        return changed, added, removed
    if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
        output.append("basewhitelist changed from '%s' to '%s'" % (a_data['basewhitelist'], b_data['basewhitelist']))
        if a_data['basewhitelist'] and b_data['basewhitelist']:
            output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))

    if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
        output.append("taskwhitelist changed from '%s' to '%s'" % (a_data['taskwhitelist'], b_data['taskwhitelist']))
        if a_data['taskwhitelist'] and b_data['taskwhitelist']:
            output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))

    if a_data['taskdeps'] != b_data['taskdeps']:
        output.append("Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))

    if a_data['basehash'] != b_data['basehash'] and not collapsed:
        output.append("basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash']))

    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
    if changed:
        for dep in changed:
            output.append("List of dependencies for variable %s changed from '%s' to '%s'" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
            if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
                output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
    if added:
        for dep in added:
            output.append("Dependency on variable %s was added" % (dep))
    if removed:
        for dep in removed:
            output.append("Dependency on variable %s was removed" % (dep))

    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
    if changed:
        for dep in changed:
            oldval = a_data['varvals'][dep]
            newval = b_data['varvals'][dep]
            if newval and oldval and ('\n' in oldval or '\n' in newval):
                diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm='')
                # Cut off the first two lines, since we aren't interested in
                # the old/new filename (they are blank anyway in this case)
                difflines = list(diff)[2:]
                output.append("Variable %s value changed:\n%s" % (dep, '\n'.join(difflines)))
            else:
                output.append("Variable %s value changed from '%s' to '%s'" % (dep, oldval, newval))

    if 'file_checksum_values' not in a_data:
        a_data['file_checksum_values'] = {}
    if 'file_checksum_values' not in b_data:
        b_data['file_checksum_values'] = {}

    changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
    if changed:
        for f, old, new in changed:
            output.append("Checksum for file %s changed from %s to %s" % (f, old, new))
    if added:
        for f in added:
            output.append("Dependency on checksum of file %s was added" % (f))
    if removed:
        for f in removed:
            output.append("Dependency on checksum of file %s was removed" % (f))

    if 'runtaskdeps' not in a_data:
        a_data['runtaskdeps'] = {}
    if 'runtaskdeps' not in b_data:
        b_data['runtaskdeps'] = {}

    if not collapsed:
        if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
            changed = ["Number of task dependencies changed"]
        else:
            changed = []
            for idx, task in enumerate(a_data['runtaskdeps']):
                a = a_data['runtaskdeps'][idx]
                b = b_data['runtaskdeps'][idx]
                if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
                    changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))

        if changed:
            clean_a = clean_basepaths_list(a_data['runtaskdeps'])
            clean_b = clean_basepaths_list(b_data['runtaskdeps'])
            if clean_a != clean_b:
                output.append("runtaskdeps changed from %s to %s" % (clean_a, clean_b))
            else:
                output.append("runtaskdeps changed:")
            output.append("\n".join(changed))

    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
        a = a_data['runtaskhashes']
        b = b_data['runtaskhashes']
        changed, added, removed = dict_diff(a, b)
        if added:
            for dep in added:
                bdep_found = False
                if removed:
                    for bdep in removed:
                        if b[dep] == a[bdep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
                            bdep_found = True
                if not bdep_found:
                    output.append("Dependency on task %s was added with hash %s" % (clean_basepath(dep), b[dep]))
        if removed:
            for dep in removed:
                adep_found = False
                if added:
                    for adep in added:
                        if b[adep] == a[dep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
                            adep_found = True
                if not adep_found:
                    output.append("Dependency on task %s was removed with hash %s" % (clean_basepath(dep), a[dep]))
        if changed:
            for dep in changed:
                if not collapsed:
                    output.append("Hash for dependent task %s changed from %s to %s" % (clean_basepath(dep), a[dep], b[dep]))
                if callable(recursecb):
                    recout = recursecb(dep, a[dep], b[dep])
                    if recout:
                        if collapsed:
                            output.extend(recout)
                        else:
                            # If a dependent hash changed, might as well print the line above and then defer to the changes in
                            # that hash since in all likelihood, they're the same changes this task also saw.
                            output = [output[-1]] + recout

    a_taint = a_data.get('taint', None)
    b_taint = b_data.get('taint', None)
    if a_taint != b_taint:
        output.append("Taint (by forced/invalidated task) changed from %s to %s" % (a_taint, b_taint))

    return output
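
# calc_basehash()/calc_taskhash() recompute the hashes purely from a loaded
# sigdata dictionary, mirroring SignatureGeneratorBasic._build_data() and
# get_taskhash() above, so stored signature files can be verified offline.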
def calc_basehash(sigdata):
    task = sigdata['task']
    basedata = sigdata['varvals'][task]

    if basedata is None:
        basedata = ''

    alldeps = sigdata['taskdeps']
    for dep in alldeps:
        basedata = basedata + dep
        val = sigdata['varvals'][dep]
        if val is not None:
            basedata = basedata + str(val)

    return hashlib.md5(basedata.encode("utf-8")).hexdigest()

def calc_taskhash(sigdata):
    data = sigdata['basehash']

    for dep in sigdata['runtaskdeps']:
        data = data + sigdata['runtaskhashes'][dep]

    for c in sigdata['file_checksum_values']:
        data = data + c[1]

    if 'taint' in sigdata:
        if 'nostamp:' in sigdata['taint']:
            data = data + sigdata['taint'][8:]
        else:
            data = data + sigdata['taint']

    return hashlib.md5(data.encode("utf-8")).hexdigest()

def dump_sigfile(a):
    output = []

    with open(a, 'rb') as f:
        p1 = pickle.Unpickler(f)
        a_data = p1.load()

    output.append("basewhitelist: %s" % (a_data['basewhitelist']))
    output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))
    output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
    output.append("basehash: %s" % (a_data['basehash']))

    for dep in a_data['gendeps']:
        output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))

    for dep in a_data['varvals']:
        output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))

    if 'runtaskdeps' in a_data:
        output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))

    if 'file_checksum_values' in a_data:
        output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))

    if 'runtaskhashes' in a_data:
        for dep in a_data['runtaskhashes']:
            output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))

    if 'taint' in a_data:
        output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

    if 'task' in a_data:
        computed_basehash = calc_basehash(a_data)
        output.append("Computed base hash is %s and from file %s" % (computed_basehash, a_data['basehash']))
    else:
        output.append("Unable to compute base hash")

    # The task hash can only be recomputed for runtime ("sigdata") files;
    # sigbasedata files lack the runtaskdeps/runtaskhashes fields that
    # calc_taskhash() indexes.
    if 'runtaskdeps' in a_data:
        computed_taskhash = calc_taskhash(a_data)
        output.append("Computed task hash is %s" % computed_taskhash)

    return output
  537. return output