
#!/usr/bin/env python3

# Script to extract information from image manifests
#
# Copyright (C) 2018 Intel Corporation
# Copyright (C) 2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#
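# Example usage (illustrative; run inside an initialized build environment so
# that bitbake and the image's pkgdata are available; paths and names below
# are placeholders, not fixed values):
#
#   image-manifest list-packages tmp/deploy/images/<machine>/<image>.manifest
#   image-manifest list-recipes tmp/deploy/images/<machine>/<image>.manifest
#   image-manifest recipe-info <package>
#   image-manifest list-depends --native <package>
#   image-manifest list-layers -o layers.json
#   image-manifest dump-config
#   image-manifest manifest-info -c default_config.json <image>.manifest
#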

import sys
import os
import argparse
import logging
import json
import shutil
import tempfile
import tarfile
from collections import OrderedDict

scripts_path = os.path.dirname(__file__)
lib_path = scripts_path + '/../lib'
sys.path = sys.path + [lib_path]
import scriptutils
logger = scriptutils.logger_create(os.path.basename(__file__))

import argparse_oe
import scriptpath

bitbakepath = scriptpath.add_bitbake_lib_path()
if not bitbakepath:
    logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
    sys.exit(1)
logger.debug('Using standard bitbake path %s' % bitbakepath)
scriptpath.add_oe_lib_path()

import bb.tinfoil
import bb.utils
import oe.utils
import oe.recipeutils

def get_pkg_list(manifest):
    pkglist = []
    with open(manifest, 'r') as f:
        for line in f:
            linesplit = line.split()
            if len(linesplit) == 3:
                # manifest file
                pkglist.append(linesplit[0])
            elif len(linesplit) == 1:
                # build dependency file
                pkglist.append(linesplit[0])
    return sorted(pkglist)
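
# Note on the input formats handled by get_pkg_list(): an image .manifest has
# three whitespace-separated fields per line (package name, architecture,
# version), e.g. (illustrative values):
#
#   busybox core2_64 1.33.0-r0
#
# while a plain build-dependency list has one package name per line. In either
# case only the first field, the package name, is collected.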

def list_packages(args):
    pkglist = get_pkg_list(args.manifest)
    for pkg in pkglist:
        print('%s' % pkg)

def pkg2recipe(tinfoil, pkg):
    if "-native" in pkg:
        logger.info('skipping %s' % pkg)
        return None

    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
    pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
    logger.debug('pkgdatafile %s' % pkgdatafile)
    try:
        f = open(pkgdatafile, 'r')
        for line in f:
            if line.startswith('PN:'):
                recipe = line.split(':', 1)[1].strip()
                return recipe
    except Exception:
        logger.warning('%s is missing' % pkgdatafile)
        return None
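
# pkg2recipe() relies on the pkgdata 'runtime-reverse' files written during
# packaging, which map a runtime package name back to recipe variables as
# 'KEY: value' lines. Only the PN line is used here; a minimal sketch of the
# relevant content (values illustrative):
#
#   PN: busybox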

def get_recipe_list(manifest, tinfoil):
    pkglist = get_pkg_list(manifest)
    recipelist = []
    for pkg in pkglist:
        recipe = pkg2recipe(tinfoil, pkg)
        if recipe:
            if not recipe in recipelist:
                recipelist.append(recipe)
    return sorted(recipelist)

def list_recipes(args):
    import bb.tinfoil
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=True)
        recipelist = get_recipe_list(args.manifest, tinfoil)
        for recipe in sorted(recipelist):
            print('%s' % recipe)

def list_layers(args):

    def find_git_repo(pth):
        checkpth = pth
        while checkpth != os.sep:
            if os.path.exists(os.path.join(checkpth, '.git')):
                return checkpth
            checkpth = os.path.dirname(checkpth)
        return None

    def get_git_remote_branch(repodir):
        try:
            stdout, _ = bb.process.run(['git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            return stdout.strip()
        else:
            return None

    def get_git_head_commit(repodir):
        try:
            stdout, _ = bb.process.run(['git', 'rev-parse', 'HEAD'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            return stdout.strip()
        else:
            return None

    def get_git_repo_url(repodir, remote='origin'):
        import bb.process
        # Try to get upstream repo location from origin remote
        try:
            stdout, _ = bb.process.run(['git', 'remote', '-v'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            for line in stdout.splitlines():
                splitline = line.split()
                if len(splitline) > 1:
                    if splitline[0] == remote and scriptutils.is_src_url(splitline[1]):
                        return splitline[1]
        return None

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=False)
        layers = OrderedDict()
        for layerdir in tinfoil.config_data.getVar('BBLAYERS').split():
            layerdata = OrderedDict()
            layername = os.path.basename(layerdir)
            logger.debug('layername %s, layerdir %s' % (layername, layerdir))
            if layername in layers:
                logger.warning('layername %s is not unique in configuration' % layername)
                layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir)
                logger.debug('trying layername %s' % layername)
                if layername in layers:
                    logger.error('Layer name %s is not unique in configuration' % layername)
                    sys.exit(2)
            repodir = find_git_repo(layerdir)
            if repodir:
                remotebranch = get_git_remote_branch(repodir)
                remote = 'origin'
                if remotebranch and '/' in remotebranch:
                    rbsplit = remotebranch.split('/', 1)
                    layerdata['actual_branch'] = rbsplit[1]
                    remote = rbsplit[0]
                layerdata['vcs_url'] = get_git_repo_url(repodir, remote)
                if os.path.abspath(repodir) != os.path.abspath(layerdir):
                    layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir)
                commit = get_git_head_commit(repodir)
                if commit:
                    layerdata['vcs_commit'] = commit
            layers[layername] = layerdata

    json.dump(layers, args.output, indent=2)
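
# The layer listing is emitted as JSON keyed by layer directory name. A sketch
# of one entry, assuming the layer lives in a git checkout with a configured
# upstream remote (all values illustrative):
#
#   "meta-example": {
#     "actual_branch": "master",
#     "vcs_url": "git://example.com/meta-example",
#     "vcs_subdir": "meta-example",
#     "vcs_commit": "0123456789abcdef..."
#   }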

def get_recipe(args):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=True)

        recipe = pkg2recipe(tinfoil, args.package)
        print(' %s package provided by %s' % (args.package, recipe))

def pkg_dependencies(args):

    def get_recipe_info(tinfoil, recipe):
        try:
            info = tinfoil.get_recipe_info(recipe)
        except Exception:
            logger.error('Failed to get recipe info for: %s' % recipe)
            sys.exit(1)
        if not info:
            logger.warning('No recipe info found for: %s' % recipe)
            sys.exit(1)
        append_files = tinfoil.get_file_appends(info.fn)
        appends = True
        data = tinfoil.parse_recipe_file(info.fn, appends, append_files)
        data.pn = info.pn
        data.pv = info.pv
        return data

    def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order):
        spaces = ' ' * order
        data = recipe_info[rn]
        if args.native:
            logger.debug('%s- %s' % (spaces, data.pn))
        elif "-native" not in data.pn:
            if "cross" not in data.pn:
                logger.debug('%s- %s' % (spaces, data.pn))

        depends = []
        for dep in data.depends:
            if dep not in assume_provided:
                depends.append(dep)

        # First find all dependencies not in package list.
        for dep in depends:
            if dep not in packages:
                packages.append(dep)
                dep_data = get_recipe_info(tinfoil, dep)
                # Do this once now to reduce the number of bitbake calls.
                dep_data.depends = dep_data.getVar('DEPENDS').split()
                recipe_info[dep] = dep_data

        # Then recursively analyze all of the dependencies for the current recipe.
        for dep in depends:
            find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1)

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare()

        assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split()
        logger.debug('assumed provided:')
        for ap in sorted(assume_provided):
            logger.debug(' - %s' % ap)

        recipe = pkg2recipe(tinfoil, args.package)
        data = get_recipe_info(tinfoil, recipe)
        data.depends = []
        depends = data.getVar('DEPENDS').split()
        for dep in depends:
            if dep not in assume_provided:
                data.depends.append(dep)
        recipe_info = dict([(recipe, data)])

        packages = []
        find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1)

        print('\nThe following packages are required to build %s' % recipe)
        for p in sorted(packages):
            data = recipe_info[p]
            if "-native" not in data.pn:
                if "cross" not in data.pn:
                    print(" %s (%s)" % (data.pn, p))

        if args.native:
            print('\nThe following native packages are required to build %s' % recipe)
            for p in sorted(packages):
                data = recipe_info[p]
                if "-native" in data.pn:
                    print(" %s (%s)" % (data.pn, p))
                if "cross" in data.pn:
                    print(" %s (%s)" % (data.pn, p))

def default_config():
    vlist = OrderedDict()
    vlist['PV'] = 'yes'
    vlist['SUMMARY'] = 'no'
    vlist['DESCRIPTION'] = 'no'
    vlist['SECTION'] = 'no'
    vlist['LICENSE'] = 'yes'
    vlist['HOMEPAGE'] = 'no'
    vlist['BUGTRACKER'] = 'no'
    vlist['PROVIDES'] = 'no'
    vlist['BBCLASSEXTEND'] = 'no'
    vlist['DEPENDS'] = 'no'
    vlist['PACKAGECONFIG'] = 'no'
    vlist['SRC_URI'] = 'yes'
    vlist['SRCREV'] = 'yes'
    vlist['EXTRA_OECONF'] = 'no'
    vlist['EXTRA_OESCONS'] = 'no'
    vlist['EXTRA_OECMAKE'] = 'no'
    vlist['EXTRA_OEMESON'] = 'no'

    clist = OrderedDict()
    clist['variables'] = vlist
    clist['filepath'] = 'no'
    clist['sha256sum'] = 'no'
    clist['layerdir'] = 'no'
    clist['layer'] = 'no'
    clist['inherits'] = 'no'
    clist['source_urls'] = 'no'
    clist['packageconfig_opts'] = 'no'
    clist['patches'] = 'no'
    clist['packagedir'] = 'no'
    return clist
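
# A custom configuration passed to 'manifest-info -c' follows the same JSON
# shape that default_config() produces (and that the 'dump-config' subcommand
# writes out): the full set of keys should be present, with individual values
# toggled between 'yes' and 'no'. A practical workflow (sketch; file name is
# just the dump-config default):
#
#   image-manifest dump-config
#   (edit default_config.json, e.g. set "source_urls": "yes")
#   image-manifest manifest-info -c default_config.json <image>.manifest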

def dump_config(args):
    config = default_config()
    f = open('default_config.json', 'w')
    json.dump(config, f, indent=2)
    logger.info('Default config list dumped to default_config.json')

def export_manifest_info(args):

    def handle_value(value):
        if value:
            return oe.utils.squashspaces(value)
        else:
            return value

    if args.config:
        logger.debug('config: %s' % args.config)
        f = open(args.config, 'r')
        config = json.load(f, object_pairs_hook=OrderedDict)
    else:
        config = default_config()
    if logger.isEnabledFor(logging.DEBUG):
        print('Configuration:')
        json.dump(config, sys.stdout, indent=2)
        print('')

    tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-')
    logger.debug('tmp dir: %s' % tmpoutdir)

    # export manifest
    shutil.copy2(args.manifest, os.path.join(tmpoutdir, "manifest"))

    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=False)

        pkglist = get_pkg_list(args.manifest)

        # export pkg list
        f = open(os.path.join(tmpoutdir, "pkgs"), 'w')
        for pkg in pkglist:
            f.write('%s\n' % pkg)
        f.close()

        recipelist = []
        for pkg in pkglist:
            recipe = pkg2recipe(tinfoil, pkg)
            if recipe:
                if not recipe in recipelist:
                    recipelist.append(recipe)
        recipelist.sort()

        # export recipe list
        f = open(os.path.join(tmpoutdir, "recipes"), 'w')
        for recipe in recipelist:
            f.write('%s\n' % recipe)
        f.close()

        try:
            rvalues = OrderedDict()
            for pn in sorted(recipelist):
                logger.debug('Package: %s' % pn)
                rd = tinfoil.parse_recipe(pn)

                rvalues[pn] = OrderedDict()

                for varname in config['variables']:
                    if config['variables'][varname] == 'yes':
                        rvalues[pn][varname] = handle_value(rd.getVar(varname))

                fpth = rd.getVar('FILE')
                layerdir = oe.recipeutils.find_layerdir(fpth)
                if config['filepath'] == 'yes':
                    rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir)
                if config['sha256sum'] == 'yes':
                    rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth)

                if config['layerdir'] == 'yes':
                    rvalues[pn]['layerdir'] = layerdir

                if config['layer'] == 'yes':
                    rvalues[pn]['layer'] = os.path.basename(layerdir)

                if config['inherits'] == 'yes':
                    gr = set(tinfoil.config_data.getVar("__inherit_cache") or [])
                    lr = set(rd.getVar("__inherit_cache") or [])
                    rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr})

                if config['source_urls'] == 'yes':
                    rvalues[pn]['source_urls'] = []
                    for url in (rd.getVar('SRC_URI') or '').split():
                        if not url.startswith('file://'):
                            url = url.split(';')[0]
                            rvalues[pn]['source_urls'].append(url)

                if config['packageconfig_opts'] == 'yes':
                    rvalues[pn]['packageconfig_opts'] = OrderedDict()
                    for key in rd.getVarFlags('PACKAGECONFIG').keys():
                        if key == 'doc':
                            continue
                        rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key, True)

                if config['patches'] == 'yes':
                    patches = oe.recipeutils.get_recipe_patches(rd)
                    rvalues[pn]['patches'] = []
                    if patches:
                        recipeoutdir = os.path.join(tmpoutdir, pn, 'patches')
                        bb.utils.mkdirhier(recipeoutdir)
                        for patch in patches:
                            # Patches may be in other layers too
                            patchlayerdir = oe.recipeutils.find_layerdir(patch)
                            # patchlayerdir will be None for remote patches, which we ignore
                            # (since currently they are considered as part of sources)
                            if patchlayerdir:
                                rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir)))
                                shutil.copy(patch, recipeoutdir)

                if config['packagedir'] == 'yes':
                    pn_dir = os.path.join(tmpoutdir, pn)
                    bb.utils.mkdirhier(pn_dir)
                    f = open(os.path.join(pn_dir, 'recipe.json'), 'w')
                    json.dump(rvalues[pn], f, indent=2)
                    f.close()

            with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f:
                json.dump(rvalues, f, indent=2)

            if args.output:
                outname = os.path.basename(args.output)
            else:
                outname = os.path.splitext(os.path.basename(args.manifest))[0]
            if outname.endswith('.tar.gz'):
                outname = outname[:-7]
            elif outname.endswith('.tgz'):
                outname = outname[:-4]

            tarfn = outname
            if tarfn.endswith(os.sep):
                tarfn = tarfn[:-1]
            if not tarfn.endswith(('.tar.gz', '.tgz')):
                tarfn += '.tar.gz'
            with open(tarfn, 'wb') as f:
                with tarfile.open(None, "w:gz", f) as tar:
                    tar.add(tmpoutdir, outname)

        finally:
            shutil.rmtree(tmpoutdir)
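
# export_manifest_info() produces a .tar.gz whose top-level directory contains,
# as written above: 'manifest' (a copy of the input), 'pkgs' and 'recipes'
# (one name per line), 'recipes.json' (the collected per-recipe values), and,
# when the 'patches'/'packagedir' options are enabled, per-recipe
# subdirectories holding copied patches and a 'recipe.json'.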

def main():
    parser = argparse_oe.ArgumentParser(description="Image manifest utility",
                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    # get recipe info
    parser_get_recipes = subparsers.add_parser('recipe-info',
                                               help='Get recipe info',
                                               description='Get recipe information for a package')
    parser_get_recipes.add_argument('package', help='Package name')
    parser_get_recipes.set_defaults(func=get_recipe)

    # list build dependencies
    parser_pkg_dep = subparsers.add_parser('list-depends',
                                           help='List dependencies',
                                           description='List dependencies required to build the package')
    parser_pkg_dep.add_argument('--native', help='Also print native and cross packages', action='store_true')
    parser_pkg_dep.add_argument('package', help='Package name')
    parser_pkg_dep.set_defaults(func=pkg_dependencies)

    # list recipes
    parser_recipes = subparsers.add_parser('list-recipes',
                                           help='List recipes producing packages within an image',
                                           description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata')
    parser_recipes.add_argument('manifest', help='Manifest file')
    parser_recipes.set_defaults(func=list_recipes)

    # list packages
    parser_packages = subparsers.add_parser('list-packages',
                                            help='List packages within an image',
                                            description='Lists packages that went into an image, using the manifest')
    parser_packages.add_argument('manifest', help='Manifest file')
    parser_packages.set_defaults(func=list_packages)

    # list layers
    parser_layers = subparsers.add_parser('list-layers',
                                          help='List included layers',
                                          description='Lists included layers')
    parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified',
                               default=sys.stdout, type=argparse.FileType('w'))
    parser_layers.set_defaults(func=list_layers)

    # dump default configuration file
    parser_dconfig = subparsers.add_parser('dump-config',
                                           help='Dump default config',
                                           description='Dump default config to default_config.json')
    parser_dconfig.set_defaults(func=dump_config)

    # export recipe info for packages in manifest
    parser_export = subparsers.add_parser('manifest-info',
                                          help='Export recipe info for a manifest',
                                          description='Export recipe information using the manifest')
    parser_export.add_argument('-c', '--config', help='Load config from JSON file')
    parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified')
    parser_export.add_argument('manifest', help='Manifest file')
    parser_export.set_defaults(func=export_manifest_info)

    args = parser.parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)
        logger.debug("Debug Enabled")
    elif args.quiet:
        logger.setLevel(logging.ERROR)

    ret = args.func(args)

    return ret

if __name__ == "__main__":
    try:
        ret = main()
    except Exception:
        ret = 1
        import traceback
        traceback.print_exc()
    sys.exit(ret)