# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives of:
# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) configured source: ARCHIVER_MODE[src] = "configured"
# 4) source mirror: ARCHIVER_MODE[src] = "mirror"
# 5) The patches between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    And you can set the ones that you'd like to exclude from the diff:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 6) The environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 7) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 8) Whether to output the .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 9) Filter by license: recipes whose license is in
#    COPYLEFT_LICENSE_INCLUDE will be included, and those whose license is
#    in COPYLEFT_LICENSE_EXCLUDE will be excluded, e.g.:
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 10) The recipe types that will be archived:
#     COPYLEFT_RECIPE_TYPES = 'target'
# 11) The source mirror mode:
#     ARCHIVER_MODE[mirror] = "split" (default): Sources are split into
#     per-recipe directories in a similar way to other archiver modes.
#     Post-processing may be required to produce a single mirror directory.
#     This does however allow inspection of duplicate sources and more
#     intelligent handling.
#     ARCHIVER_MODE[mirror] = "combined": All sources are placed into a single
#     directory suitable for direct use as a mirror. Duplicate sources are
#     ignored.
# 12) Source mirror exclusions:
#     ARCHIVER_MIRROR_EXCLUDE is a list of prefixes to exclude from the mirror.
#     This may be used for sources which you are already publishing yourself
#     (e.g. if the URI starts with 'https://mysite.com/' and your mirror is
#     going to be published to the same site). It may also be used to exclude
#     local files (with the prefix 'file://') if these will be provided as part
#     of an archive of the layers themselves.
#
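# A minimal local.conf sketch (illustrative values) enabling this class with
# the default patched-source mode plus diffs:
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "patched"
#   ARCHIVER_MODE[diff] = "1"
#   COPYLEFT_LICENSE_INCLUDE = "GPL* LGPL*"
#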
  42. # Create archive for all the recipe types
  43. COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
  44. inherit copyleft_filter
  45. ARCHIVER_MODE[srpm] ?= "0"
  46. ARCHIVER_MODE[src] ?= "patched"
  47. ARCHIVER_MODE[diff] ?= "0"
  48. ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
  49. ARCHIVER_MODE[dumpdata] ?= "0"
  50. ARCHIVER_MODE[recipe] ?= "0"
  51. ARCHIVER_MODE[mirror] ?= "split"
  52. DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
  53. ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
  54. ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
  55. ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
  56. ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
  57. ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
  58. # When producing a combined mirror directory, allow duplicates for the case
  59. # where multiple recipes use the same SRC_URI.
  60. ARCHIVER_COMBINED_MIRRORDIR = "${ARCHIVER_TOPDIR}/mirror"
  61. SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}/mirror"
  62. do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
  63. do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
  64. do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
  65. do_deploy_archives[dirs] = "${WORKDIR}"
  66. # This is a convenience for the shell script to use it
python () {
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

    def hasTask(task):
        return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched resp. do_ar_configured depend on it, but for 'original'
        # we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # will cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the depends here.

        # There is a corner case with "gcc-source-${PV}" recipes: they don't
        # have the "do_configure" task, so we need to use "do_preconfigure"
        if hasTask("do_preconfigure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        elif hasTask("do_configure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
    elif ar_src == "mirror":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_mirror' % pn)
    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" not in d.getVar('PACKAGE_CLASSES'):
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")

        # Some recipes do not have any packaging tasks
        if hasTask("do_package_write_rpm"):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}
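
# Illustrative task chain for the default ARCHIVER_MODE[src] = "patched",
# derived from the dependencies wired up above and the addtask calls below:
#   do_unpack_and_patch -> do_ar_patched -> do_deploy_archives -> do_build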

# Takes all the sources for a recipe and puts them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {
    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
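    #
    # For example (illustrative URL), decoding
    #   git://example.com/repo.git;branch=main;destsuffix=${S}/foobar
    # with bb.fetch2.decodeurl() yields a 6-tuple whose last element is the
    # parameter dict; 'destsuffix' is deleted from it before re-encoding.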
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded

    # Clean up SRC_URI before calling bb.fetch2.Fetch() since SRC_URI is now
    # in the variable "urls"; otherwise there might be errors like:
    # The SRCREV_FORMAT variable must be set when multiple SCMs are used
    ld = bb.data.createCopy(d)
    ld.setVar('SRC_URI', '')
    fetch = bb.fetch2.Fetch(urls, ld)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net; in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
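            # For example (illustrative): a URL carrying name=rt ends up as
            # ${PF}-rt.tar.gz via the create_tarball() call below.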
            params = bb.fetch2.decodeurl(url)[5]
            type = bb.fetch2.decodeurl(url)[0]
            location = bb.fetch2.decodeurl(url)[2]
            name = params.get('name', '')
            if type.lower() == 'file':
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
                tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
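    # Each series line is '<patch file> -p<striplevel>', for example
    # 'fix-build.patch -p1' (illustrative patch name).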
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}

python do_ar_patched() {
    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if not is_work_shared(d):
        ar_workdir = d.getVar('ARCHIVER_WORKDIR')
        d.setVar('WORKDIR', ar_workdir)
    bb.note('Archiving the patched source...')
    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
        # "gcc-source-${PV}" recipes don't have a "do_configure"
        # task, so we need to run "do_preconfigure" instead
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # The do_configure of libtool-native removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; we archive the already-configured ${S} instead.
        elif pn != 'libtool-native':
            def runTask(task):
                prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                for func in prefuncs.split():
                    if func != "sysroot_cleansstate":
                        bb.build.exec_func(func, d)
                bb.build.exec_func(task, d)
                postfuncs = d.getVarFlag(task, 'postfuncs') or ''
                for func in postfuncs.split():
                    if func != 'do_qa_configure':
                        bb.build.exec_func(func, d)

            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))

            preceeds = bb.build.preceedtask('do_configure', False, d)
            for task in preceeds:
                if task != 'do_patch' and task != 'do_prepare_recipe_sysroot':
                    runTask(task)
            runTask('do_configure')

        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

python do_ar_mirror() {
    import subprocess

    src_uri = (d.getVar('SRC_URI') or '').split()
    if len(src_uri) == 0:
        return

    dl_dir = d.getVar('DL_DIR')
    mirror_exclusions = (d.getVar('ARCHIVER_MIRROR_EXCLUDE') or '').split()
    mirror_mode = d.getVarFlag('ARCHIVER_MODE', 'mirror')
    have_mirror_tarballs = d.getVar('BB_GENERATE_MIRROR_TARBALLS')

    if mirror_mode == 'combined':
        destdir = d.getVar('ARCHIVER_COMBINED_MIRRORDIR')
    elif mirror_mode == 'split':
        destdir = d.getVar('ARCHIVER_OUTDIR')
    else:
        bb.fatal('Invalid ARCHIVER_MODE[mirror]: %s' % (mirror_mode))

    if not have_mirror_tarballs:
        bb.fatal('Using `ARCHIVER_MODE[src] = "mirror"` depends on setting `BB_GENERATE_MIRROR_TARBALLS = "1"`')

    def is_excluded(url):
        for prefix in mirror_exclusions:
            if url.startswith(prefix):
                return True
        return False

    bb.note('Archiving the source as a mirror...')

    bb.utils.mkdirhier(destdir)

    fetcher = bb.fetch2.Fetch(src_uri, d)

    for ud in fetcher.expanded_urldata():
        if is_excluded(ud.url):
            bb.note('Skipping excluded url: %s' % (ud.url))
            continue

        bb.note('Archiving url: %s' % (ud.url))
        ud.setup_localpath(d)
        localpath = None

        # Check for mirror tarballs first. We will archive the first mirror
        # tarball that we find as it's assumed that we just need one.
        for mirror_fname in ud.mirrortarballs:
            mirror_path = os.path.join(dl_dir, mirror_fname)
            if os.path.exists(mirror_path):
                bb.note('Found mirror tarball: %s' % (mirror_path))
                localpath = mirror_path
                break

        if len(ud.mirrortarballs) and not localpath:
            bb.warn('Mirror tarballs are listed for a source but none are present. ' \
                    'Falling back to original download.\n' \
                    'SRC_URI = %s' % (ud.url))

        # Check original download
        if not localpath:
            bb.note('Using original download: %s' % (ud.localpath))
            localpath = ud.localpath

        if not localpath or not os.path.exists(localpath):
            bb.fatal('Original download is missing for a source.\n' \
                     'SRC_URI = %s' % (ud.url))

        # We now have an appropriate localpath
        bb.note('Copying source mirror')
        cmd = 'cp -fpPRH %s %s' % (localpath, destdir)
        subprocess.check_call(cmd, shell=True)
}
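
# tarfile 'filter' callback used by create_tarball(): drops quilt metadata
# and temporary directories ('temp', 'patches', '.pc') from the archive.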
def exclude_useless_paths(tarinfo):
    if tarinfo.isdir():
        if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
            return None
        elif tarinfo.name == 'temp' or tarinfo.name == 'patches' or tarinfo.name == '.pc':
            return None
    return tarinfo

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    create the tarball from srcdir
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if (d.getVar('SRC_URI') == ""):
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
    else:
        filename = '%s.tar.gz' % d.getVar('PF')
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    tar = tarfile.open(tarname, 'w:gz')
    tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
    tar.close()

# Creates a .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):
    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff's --exclude can't exclude files by path, so we copy
    # the patched source, and remove the files that we'd like to
    # exclude.
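    # Layout sketch (illustrative): for S = '.../foo-1.0' this diffs
    # 'foo-1.0.orig' against 'foo-1.0.patched' and writes ${PF}-diff.gz
    # into ar_outdir.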
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        os.chdir(cwd)

def is_work_shared(d):
    pn = d.getVar('PN')
    return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require it to be on work-shared, so we don't change WORKDIR
    if not is_work_shared(d):
        # Change the WORKDIR to make do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # Changing 'WORKDIR' also changes 'B'; create the 'B' directory since
        # following tasks may require it (e.g. some recipes' do_patch requires
        # 'B' to exist).
        bb.utils.mkdirhier(d.getVar('B'))

        bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

# BBINCLUDED is special (excluded from basehash signature
# calculation). Using it in a task signature can cause "basehash
# changed" errors.
#
# Depending on BBINCLUDED also causes do_ar_recipe to run again
# for unrelated changes, like adding or removing buildhistory.bbclass.
#
# For these reasons we ignore the dependency completely. The versioning
# of the output file ensures that we create it each time the recipe
# gets rebuilt, at least as long as a PR server is used. We also rely
# on that mechanism to catch changes in the file content, because the
# file content is not part of the task signature either.
do_ar_recipe[vardepsexclude] += "BBINCLUDED"
python do_ar_recipe () {
    """
    archive the recipe, including .bb and .inc.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
    bbfile = d.getVar('FILE')
    outdir = os.path.join(d.getVar('WORKDIR'), \
            '%s-recipe' % d.getVar('PF'))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN')
    bbappend_files = d.getVar('BBINCLUDED').split()
    # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
    # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn))

    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = d.expand(incfile)
            if incfile:
                incfile = bb.utils.which(bbpath, incfile)
            if incfile:
                shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    dump environment data to ${PF}-showdata.dump
    """
    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
        '%s-showdata.dump' % d.getVar('PF'))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as f:
        # emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # emit the metadata which isn't valid shell
        for e in d.keys():
            if d.getVarFlag(e, "python", False):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
}

SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch do_preconfigure
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_ar_mirror after do_fetch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives

do_build[recrdeptask] += "do_deploy_archives"
do_populate_sdk[recrdeptask] += "do_deploy_archives"

python () {
    # Add tasks in the correct order, specifically for linux-yocto to avoid a race condition.
    # sstatesig.py:sstate_rundepfilter has special support that excludes this dependency
    # so that do_kernel_configme does not need to run again when do_unpack_and_patch
    # gets added or removed (by adding or removing archiver.bbclass).
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}