#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#
#
# Packaging process
#
# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS,
# taking D and splitting it up into the packages listed in PACKAGES, placing the
# resulting output in PKGDEST.
#
# There are the following default steps but PACKAGEFUNCS can be extended (see the
# example after this list):
#
# a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC}
#
# b) perform_packagecopy - Copy D into PKGD
#
# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
#
# d) split_and_strip_files - split the files into runtime and debug and strip them.
#    Debug files include debug info split, and associated sources that end up in -dbg packages
#
# e) fixup_perms - Fix up permissions in the package before we split it.
#
# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
#    Also triggers the binary stripping code to put files in -dbg packages.
#
# g) package_do_filedeps - Collect perfile run-time dependency metadata
#    The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
#    a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
#
# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
#    dependencies found. Also stores the package name so anyone else using this library
#    knows which package to depend on.
#
# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
#
# j) read_shlibdeps - Reads the stored shlibs information into the metadata
#
# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
#
# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
#    packaging steps
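
#
# PACKAGEFUNCS itself is an ordinary list of task functions, so recipes and classes
# can append their own steps. The snippet below is only an illustrative sketch; the
# function name "do_my_extra_packaging" is a hypothetical example, not part of this
# class:
#
#   PACKAGEFUNCS += "do_my_extra_packaging"
#
#   python do_my_extra_packaging () {
#       # Appended functions run in list order after the default steps above,
#       # once the split packages exist under PKGDEST.
#       bb.note("PKGDEST is %s" % d.getVar('PKGDEST'))
#   }
#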
inherit packagedata
inherit chrpath
inherit package_pkgdata
inherit insane

PKGD = "${WORKDIR}/package"
PKGDEST = "${WORKDIR}/packages-split"

LOCALE_SECTION ?= ''

ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"

# rpm is used for the per-file dependency identification
# dwarfsrcfiles is used to determine the list of debug source files
PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native"

# If your postinstall can execute at rootfs creation time rather than on
# target but depends on a native/cross tool in order to execute, you need to
# list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong
# in the package dependencies as normal, this is just for native/cross support
# tools at rootfs build time.
PACKAGE_WRITE_DEPS ??= ""
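
# For example, a recipe whose pkg_postinst can run at rootfs creation time but needs a
# host tool to do so might add (illustrative only; "my-tool-native" is a hypothetical
# recipe name):
#
#   PACKAGE_WRITE_DEPS += "my-tool-native"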
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings
    """
    import re

    def fixutf(m):
        cp = m.group(1)
        if cp:
            return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items. Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items. Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.
    """

    dvar = d.getVar('PKGD')
    root = d.expand(root)
    output_pattern = d.expand(output_pattern)
    extra_depends = d.expand(extra_depends)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    ml = d.getVar("MLPREFIX")
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)

    packages = d.getVar('PACKAGES').split()
    split_packages = set()

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    if extra_depends == None:
        extra_depends = d.getVar("PN")

    if not summary:
        summary = description

    for o in sorted(objs):
        import re, stat
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.add(pkg)
        if not pkg in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES:' + pkg)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            the_files = [newfile]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if type(aux_files_pattern_verbatim) is list:
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES:' + pkg, " ".join(the_files))
        else:
            d.setVar('FILES:' + pkg, oldfiles + " " + newfile)
        if extra_depends != '':
            d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends)
        if not d.getVar('DESCRIPTION:' + pkg):
            d.setVar('DESCRIPTION:' + pkg, description % on)
        if not d.getVar('SUMMARY:' + pkg):
            d.setVar('SUMMARY:' + pkg, summary % on)
        if postinst:
            d.setVar('pkg_postinst:' + pkg, postinst)
        if postrm:
            d.setVar('pkg_postrm:' + pkg, postrm)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))

    return list(split_packages)
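
#
# A typical use of do_split_packages() is to generate one package per plugin from a
# populate_packages:prepend hook in a recipe. The snippet below is an illustrative
# sketch only; the directory, regex and package names ("myapp", "myapp-plugin-%s")
# are hypothetical:
#
#   python populate_packages:prepend () {
#       plugindir = d.expand('${libdir}/myapp/plugins')
#       do_split_packages(d, plugindir, r'^lib(.*)\.so$', 'myapp-plugin-%s',
#                         'MyApp plugin for %s', extra_depends='')
#   }
#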
PACKAGE_DEPENDS += "file-native"

python () {
    if d.getVar('PACKAGES') != '':
        deps = ""
        for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
            deps += " %s:do_populate_sysroot" % dep
        if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
            deps += ' xz-native:do_populate_sysroot'
        d.appendVarFlag('do_package', 'depends', deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
# Get a list of files from file vars by searching files under current working directory
# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    import os, glob
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f)
        if globbed:
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    symlink_paths = []
    for ind, f in enumerate(files):
        # Handle directory symlinks. Truncate path to the lowest level symlink
        parent = ''
        for dirname in f.split('/')[:-1]:
            parent = os.path.join(parent, dirname)
            if dirname == '.':
                continue
            if cpath.islink(parent):
                bb.warn("FILES contains file '%s' which resides under a "
                        "directory symlink. Please fix the recipe and use the "
                        "real path for the file." % f[1:])
                symlink_paths.append(f)
                files[ind] = parent
                f = parent
                break

        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [ os.path.join(f, x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files, symlink_paths
# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    pkgdest = d.getVar('PKGDEST')
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    os.chdir(root)

    conffiles = d.getVar('CONFFILES:%s' % pkg)
    if conffiles == None:
        conffiles = d.getVar('CONFFILES')
    if conffiles == None:
        conffiles = ""
    conffiles = conffiles.split()
    conf_orig_list = files_from_filevars(conffiles)[0]

    # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
    conf_list = []
    for f in conf_orig_list:
        if os.path.isdir(f):
            continue
        if os.path.islink(f):
            continue
        if not os.path.exists(f):
            continue
        conf_list.append(f)

    # Remove the leading './'
    for i in range(0, len(conf_list)):
        conf_list[i] = conf_list[i][1:]

    os.chdir(cwd)
    return conf_list
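
# Recipes flag their configuration files via CONFFILES, which this function expands,
# e.g. (illustrative only; "example.conf" is a hypothetical file):
#
#   CONFFILES:${PN} = "${sysconfdir}/example.conf"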
def checkbuildpath(file, d):
    tmpdir = d.getVar('TMPDIR')
    with open(file) as f:
        file_content = f.read()
        if tmpdir in file_content:
            return True

    return False

def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    debugfiles = {}

    for line in dwarfsrcfiles_output.splitlines():
        if line.startswith("\t"):
            debugfiles[os.path.normpath(line.split()[0])] = ""

    return debugfiles.keys()
def source_info(file, d, fatal=True):
    import subprocess

    cmd = ["dwarfsrcfiles", file]
    try:
        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
        retval = 0
    except subprocess.CalledProcessError as exc:
        output = exc.output
        retval = exc.returncode

    # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
    if retval != 0 and retval != 255:
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)

    return list(debugsources)
def splitdebuginfo(file, dvar, dv, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # return a mapping of files:debugsources

    import stat
    import subprocess

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    sources = []

    if file.endswith(".ko") and file.find("/lib/modules/") != -1:
        if oe.package.is_kernel_module_signed(file):
            bb.debug(1, "Skip strip on signed module %s" % file)
            return (file, sources)

    # Split the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Split %s -> %s" % (file, debugfile))

    # Only store off the hard link reference if we successfully split!

    dvar = d.getVar('PKGD')
    objcopy = d.getVar("OBJCOPY")

    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)

    # Set the debuglink to have the view of the file path on the target
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
def splitstaticdebuginfo(file, dvar, dv, d):
    # Unlike the function above, there is no way to split a static library into
    # two components. So to get similar results we will copy the unmodified
    # static library (containing the debug symbols) into a new directory.
    # We will then strip (preserving symbols) the static library in the
    # typical location.
    #
    # return a mapping of files:debugsources

    import stat
    import shutil

    src = file[len(dvar):]
    dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
    debugfile = dvar + dest
    sources = []

    # Copy the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Copy %s -> %s" % (file, debugfile))

    dvar = d.getVar('PKGD')

    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy the unmodified item to the debug directory
    shutil.copy2(file, debugfile)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
def inject_minidebuginfo(file, dvar, dv, d):
    # Extract just the symbols from debuginfo into minidebuginfo,
    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html

    import subprocess

    readelf = d.getVar('READELF')
    nm = d.getVar('NM')
    objcopy = d.getVar('OBJCOPY')

    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    minidebugfile = minidebuginfodir + src + '.minidebug'
    bb.utils.mkdirhier(os.path.dirname(minidebugfile))

    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
    # so skip it.
    if not os.path.exists(debugfile):
        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
        return

    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
    # We will exclude all of these from minidebuginfo to save space.
    remove_section_names = []
    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
        fields = line.split()
        if len(fields) < 8:
            continue
        name = fields[0]
        type = fields[1]
        flags = fields[7]
        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
        if name.startswith('.debug_'):
            continue
        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
            remove_section_names.append(name)

    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
    # because they are always present in the binary.
    dynsyms = set()
    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
        dynsyms.add(line.split()[0])

    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
    # These are the ones we want to keep in minidebuginfo.
    keep_symbols_file = minidebugfile + '.symlist'
    found_any_symbols = False
    with open(keep_symbols_file, 'w') as f:
        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
            fields = line.split('|')
            if len(fields) < 7:
                continue
            name = fields[0].strip()
            type = fields[3].strip()
            if type == 'FUNC' and name not in dynsyms:
                f.write('{}\n'.format(name))
                found_any_symbols = True

    if not found_any_symbols:
        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
        return

    bb.utils.remove(minidebugfile)
    bb.utils.remove(minidebugfile + '.xz')

    subprocess.check_call([objcopy, '-S'] +
                          ['--remove-section={}'.format(s) for s in remove_section_names] +
                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])

    subprocess.check_call(['xz', '--keep', minidebugfile])

    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.

    import stat
    import subprocess

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
            sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        sdir = d.getVar("S")
        cflags = d.expand("${CFLAGS}")

        prefixmap = {}
        for flag in cflags.split():
            if not flag.startswith("-fdebug-prefix-map"):
                continue
            if "recipe-sysroot" in flag:
                continue
            flag = flag.split("=")
            prefixmap[flag[1]] = flag[2]

        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
        bb.utils.mkdirhier(basepath)
        cpath.updatecache(basepath)

        for pmap in prefixmap:
            # Ignore files from the recipe sysroots (target and native)
            cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile

            # We need to ignore files that are not actually ours
            # we do this by only paying attention to items from this package
            cmd += "fgrep -zw '%s' | " % prefixmap[pmap]

            # Remove prefix in the source paths
            cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
            cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])

            try:
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                # Can "fail" if internal headers/transient sources are attempted
                pass

            # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
            # Work around this by manually finding and copying any symbolic links that made it through.
            cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
                    (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # debugsources.list may be polluted from the host if we used externalsrc,
        # cpio uses copy-pass and may have just created a directory structure
        # matching the one from the host, if that's the case move those files to
        # debugsrcdir to avoid host contamination.
        # Empty dir structure will be deleted in the next step.

        # Same check as above for externalsrc
        if workdir not in sdir:
            if os.path.exists(dvar + debugsrcdir + sdir):
                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar, debugsrcdir)
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if it's empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
#
# Package data handling routines
#

def get_package_mapping(pkg, basepkg, d, depversions=None):
    import oe.packagedata

    data = oe.packagedata.read_subpkgdata(pkg, d)
    key = "PKG:%s" % pkg

    if key in data:
        if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
            bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
        # Have to avoid undoing the write_extra_pkgs(global_variants...)
        if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
            and data[key] == basepkg:
            return pkg
        if depversions == []:
            # Avoid returning a mapping if the renamed package rprovides its original name
            rprovkey = "RPROVIDES:%s" % pkg
            if rprovkey in data:
                if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
                    bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
                    return pkg
        # Do map to rewritten package name
        return data[key]

    return pkg
def get_package_additional_metadata(pkg_type, d):
    base_key = "PACKAGE_ADD_METADATA"
    for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
        if d.getVar(key, False) is None:
            continue
        d.setVarFlag(key, "type", "list")
        if d.getVarFlag(key, "separator") is None:
            d.setVarFlag(key, "separator", "\\n")
        metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
        return "\n".join(metadata_fields).strip()
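
# For example, extra control/spec fields can be injected globally or per package
# backend via PACKAGE_ADD_METADATA and its per-type variants (the field names and
# values below are illustrative only):
#
#   PACKAGE_ADD_METADATA = "Vendor: Example Inc."
#   PACKAGE_ADD_METADATA_IPK = "Maintainer: maintainer@example.com"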
def runtime_mapping_rename(varname, pkg, d):
    #bb.note("%s before: %s" % (varname, d.getVar(varname)))

    new_depends = {}
    deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
    for depend, depversions in deps.items():
        new_depend = get_package_mapping(depend, pkg, d, depversions)
        if depend != new_depend:
            bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
        new_depends[new_depend] = deps[depend]

    d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))

    #bb.note("%s after: %s" % (varname, d.getVar(varname)))
#
# Used by do_packagedata (and possibly other routines post do_package)
#

PRSERV_ACTIVE = "${@bool(d.getVar("PRSERV_HOST"))}"
PRSERV_ACTIVE[vardepvalue] = "${PRSERV_ACTIVE}"
package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA"
package_get_auto_pr[vardeps] += "PRSERV_ACTIVE"
python package_get_auto_pr() {
    import oe.prservice

    def get_do_package_hash(pn):
        if d.getVar("BB_RUNTASK") != "do_package":
            taskdepdata = d.getVar("BB_TASKDEPDATA", False)
            for dep in taskdepdata:
                if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn:
                    return taskdepdata[dep][6]
        return None

    # Support per recipe PRSERV_HOST
    pn = d.getVar('PN')
    host = d.getVar("PRSERV_HOST_" + pn)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    pkgv = d.getVar("PKGV")

    # PR Server not active, handle AUTOINC
    if not d.getVar('PRSERV_HOST'):
        d.setVar("PRSERV_PV_AUTOINC", "0")
        return

    auto_pr = None
    pv = d.getVar("PV")
    version = d.getVar("PRAUTOINX")
    pkgarch = d.getVar("PACKAGE_ARCH")
    checksum = get_do_package_hash(pn)

    # If do_package isn't in the dependencies, we can't get the checksum...
    if not checksum:
        bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK'))
        #taskdepdata = d.getVar("BB_TASKDEPDATA", False)
        #for dep in taskdepdata:
        #    bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6]))
        return

    if d.getVar('PRSERV_LOCKDOWN'):
        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
        if auto_pr is None:
            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
        d.setVar('PRAUTO', str(auto_pr))
        return

    try:
        conn = oe.prservice.prserv_make_conn(d)
        if conn is not None:
            if "AUTOINC" in pkgv:
                srcpv = bb.fetch2.get_srcrev(d)
                base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
                value = conn.getPR(base_ver, pkgarch, srcpv)
                d.setVar("PRSERV_PV_AUTOINC", str(value))

            auto_pr = conn.getPR(version, pkgarch, checksum)
            conn.close()
    except Exception as e:
        bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
    if auto_pr is None:
        bb.fatal("Can NOT get PRAUTO from remote PR service")
    d.setVar('PRAUTO', str(auto_pr))
}
#
# Package functions suitable for inclusion in PACKAGEFUNCS
#

python package_convert_pr_autoinc() {
    pkgv = d.getVar("PKGV")

    # Adjust pkgv as necessary...
    if 'AUTOINC' in pkgv:
        d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))

    # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
    d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
    d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
}
LOCALEBASEPN ??= "${PN}"

python package_do_split_locales() {
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    datadir = d.getVar('datadir')
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION:' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
}
python perform_packagecopy () {
    import subprocess
    import shutil

    dest = d.getVar('D')
    dvar = d.getVar('PKGD')

    # Start package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links
    cmd = 'tar --exclude=./sysroot-only -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar)
    subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace(dvar, d)
}
perform_packagecopy[cleandirs] = "${PKGD}"
perform_packagecopy[dirs] = "${PKGD}"

# We generate a master list of directories to process: we start by
# seeding this list with reasonable defaults, then load entries from
# the fs-perms.txt files
python fixup_perms () {
    import pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry.fgid = expected file gid or -1
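    #
    # Illustrative fs-perms.txt style entries in the two accepted formats above
    # (the paths and modes here are examples only):
    #    ${prefix}/src 0755 root root false - - -
    #    ${localstatedir}/run link /run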
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                oe.qa.handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode, 8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH')
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
        for conf_file in fs_perms_tables.split():
            confpath = bb.utils.which(bbpath, conf_file)
            if confpath:
                str += " %s" % bb.utils.which(bbpath, conf_file)
            else:
                bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
        return str

    dvar = d.getVar('PKGD')

    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [ 'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if not os.path.exists(conf):
            continue
        with open(conf) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    oe.qa.handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    if entry.link:
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            oe.qa.handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        bb.utils.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
def package_debug_vars(d):
    # We default to '.debug' style
    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debug_vars = {
            "append": ".debug",
            "staticappend": "",
            "dir": "",
            "staticdir": "",
            "libdir": "/usr/lib/debug",
            "staticlibdir": "/usr/lib/debug-static",
            "srcdir": "/usr/src/debug",
        }
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "",
        }
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "/usr/src/debug",
        }
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "/usr/src/debug",
        }

    return debug_vars
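
# The split style is chosen via PACKAGE_DEBUG_SPLIT_STYLE, typically in a distro or
# local configuration; for example, one of the values handled above:
#
#   PACKAGE_DEBUG_SPLIT_STYLE = "debug-with-srcpkg"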
  968. python split_and_strip_files () {
  969. import stat, errno
  970. import subprocess
  971. dvar = d.getVar('PKGD')
  972. pn = d.getVar('PN')
  973. hostos = d.getVar('HOST_OS')
  974. oldcwd = os.getcwd()
  975. os.chdir(dvar)
  976. dv = package_debug_vars(d)
  977. #
  978. # First lets figure out all of the files we may have to process ... do this only once!
  979. #
  980. elffiles = {}
  981. symlinks = {}
  982. staticlibs = []
  983. inodes = {}
  984. libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
  985. baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
  986. skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
  987. if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
  988. d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
  989. checkelf = {}
  990. checkelflinks = {}
  991. for root, dirs, files in cpath.walk(dvar):
  992. for f in files:
  993. file = os.path.join(root, f)
  994. # Skip debug files
  995. if dv["append"] and file.endswith(dv["append"]):
  996. continue
  997. if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
  998. continue
  999. if file in skipfiles:
  1000. continue
  1001. if oe.package.is_static_lib(file):
  1002. staticlibs.append(file)
  1003. continue
  1004. try:
  1005. ltarget = cpath.realpath(file, dvar, False)
  1006. s = cpath.lstat(ltarget)
  1007. except OSError as e:
  1008. (err, strerror) = e.args
  1009. if err != errno.ENOENT:
  1010. raise
  1011. # Skip broken symlinks
  1012. continue
  1013. if not s:
  1014. continue
  1015. # Check its an executable
  1016. if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
  1017. or (s[stat.ST_MODE] & stat.S_IXOTH) \
  1018. or ((file.startswith(libdir) or file.startswith(baselibdir)) \
  1019. and (".so" in f or ".node" in f)) \
  1020. or (f.startswith('vmlinux') or ".ko" in f):
  1021. if cpath.islink(file):
  1022. checkelflinks[file] = ltarget
  1023. continue
  1024. # Use a reference of device ID and inode number to identify files
  1025. file_reference = "%d_%d" % (s.st_dev, s.st_ino)
  1026. checkelf[file] = (file, file_reference)
  1027. results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
  1028. results_map = {}
  1029. for (ltarget, elf_file) in results:
  1030. results_map[ltarget] = elf_file
  1031. for file in checkelflinks:
  1032. ltarget = checkelflinks[file]
  1033. # If it's a symlink, and points to an ELF file, we capture the readlink target
  1034. if results_map[ltarget]:
  1035. target = os.readlink(file)
  1036. #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
  1037. symlinks[file] = target
  1038. results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)
  1039. # Sort results by file path. This ensures that the files are always
  1040. # processed in the same order, which is important to make sure builds
  1041. # are reproducible when dealing with hardlinks
  1042. results.sort(key=lambda x: x[0])
  1043. for (file, elf_file) in results:
  1044. # It's a file (or hardlink), not a link
  1045. # ...but is it ELF, and is it already stripped?
  1046. if elf_file & 1:
  1047. if elf_file & 2:
  1048. if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
  1049. bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
  1050. else:
  1051. msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
  1052. oe.qa.handle_error("already-stripped", msg, d)
  1053. continue
  1054. # At this point we have an unstripped elf file. We need to:
  1055. # a) Make sure any file we strip is not hardlinked to anything else outside this tree
  1056. # b) Only strip any hardlinked file once (no races)
  1057. # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
  1058. # Use a reference of device ID and inode number to identify files
  1059. file_reference = checkelf[file][1]
  1060. if file_reference in inodes:
  1061. os.unlink(file)
  1062. os.link(inodes[file_reference][0], file)
  1063. inodes[file_reference].append(file)
  1064. else:
  1065. inodes[file_reference] = [file]
  1066. # break hardlink
  1067. bb.utils.break_hardlinks(file)
  1068. elffiles[file] = elf_file
  1069. # Modified the file so clear the cache
  1070. cpath.updatecache(file)
  1071. def strip_pkgd_prefix(f):
  1072. nonlocal dvar
  1073. if f.startswith(dvar):
  1074. return f[len(dvar):]
  1075. return f
  1076. #
  1077. # First lets process debug splitting
  1078. #
  1079. if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
  1080. results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))
  1081. if dv["srcdir"] and not hostos.startswith("mingw"):
  1082. if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
  1083. results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
  1084. else:
  1085. for file in staticlibs:
  1086. results.append( (file,source_info(file, d)) )
  1087. d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
  1088. sources = set()
  1089. for r in results:
  1090. sources.update(r[1])
  1091. # Hardlink our debug symbols to the other hardlink copies
  1092. for ref in inodes:
  1093. if len(inodes[ref]) == 1:
  1094. continue
  1095. target = inodes[ref][0][len(dvar):]
  1096. for file in inodes[ref][1:]:
  1097. src = file[len(dvar):]
  1098. dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
  1099. fpath = dvar + dest
  1100. ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
  1101. bb.utils.mkdirhier(os.path.dirname(fpath))
1102. # Only create one hardlink to the separated debug info file in each directory
  1103. if not os.access(fpath, os.R_OK):
  1104. #bb.note("Link %s -> %s" % (fpath, ftarget))
  1105. os.link(ftarget, fpath)
1106. # Create symlinks for all cases where we were able to split symbols
  1107. for file in symlinks:
  1108. src = file[len(dvar):]
  1109. dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
  1110. fpath = dvar + dest
  1111. # Skip it if the target doesn't exist
  1112. try:
  1113. s = os.stat(fpath)
  1114. except OSError as e:
  1115. (err, strerror) = e.args
  1116. if err != errno.ENOENT:
  1117. raise
  1118. continue
  1119. ltarget = symlinks[file]
  1120. lpath = os.path.dirname(ltarget)
  1121. lbase = os.path.basename(ltarget)
  1122. ftarget = ""
  1123. if lpath and lpath != ".":
  1124. ftarget += lpath + dv["dir"] + "/"
  1125. ftarget += lbase + dv["append"]
  1126. if lpath.startswith(".."):
  1127. ftarget = os.path.join("..", ftarget)
  1128. bb.utils.mkdirhier(os.path.dirname(fpath))
  1129. #bb.note("Symlink %s -> %s" % (fpath, ftarget))
  1130. os.symlink(ftarget, fpath)
  1131. # Process the dv["srcdir"] if requested...
  1132. # This copies and places the referenced sources for later debugging...
  1133. copydebugsources(dv["srcdir"], sources, d)
  1134. #
  1135. # End of debug splitting
  1136. #
  1137. #
1138. # Now let's go back over things and strip them
  1139. #
  1140. if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
  1141. strip = d.getVar("STRIP")
  1142. sfiles = []
  1143. for file in elffiles:
  1144. elf_file = int(elffiles[file])
  1145. #bb.note("Strip %s" % file)
  1146. sfiles.append((file, elf_file, strip))
  1147. if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
  1148. for f in staticlibs:
  1149. sfiles.append((f, 16, strip))
  1150. oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)
  1151. # Build "minidebuginfo" and reinject it back into the stripped binaries
  1152. if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
  1153. oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
  1154. extraargs=(dvar, dv, d))
  1155. #
  1156. # End of strip
  1157. #
  1158. os.chdir(oldcwd)
  1159. }
  1160. python populate_packages () {
  1161. import glob, re
  1162. workdir = d.getVar('WORKDIR')
  1163. outdir = d.getVar('DEPLOY_DIR')
  1164. dvar = d.getVar('PKGD')
  1165. packages = d.getVar('PACKAGES').split()
  1166. pn = d.getVar('PN')
  1167. bb.utils.mkdirhier(outdir)
  1168. os.chdir(dvar)
  1169. autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
  1170. split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
1171. # If debug-with-srcpkg mode is enabled, add the source package if it doesn't
1172. # already exist, and add the source file contents to the source package.
  1173. if split_source_package:
  1174. src_package_name = ('%s-src' % d.getVar('PN'))
  1175. if not src_package_name in packages:
  1176. packages.append(src_package_name)
  1177. d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
  1178. # Sanity check PACKAGES for duplicates
1179. # This check should be moved to sanity.bbclass once we have the infrastructure
  1180. package_dict = {}
  1181. for i, pkg in enumerate(packages):
  1182. if pkg in package_dict:
1183. msg = "%s is listed in PACKAGES multiple times, which leads to packaging errors." % pkg
  1184. oe.qa.handle_error("packages-list", msg, d)
  1185. # Ensure the source package gets the chance to pick up the source files
  1186. # before the debug package by ordering it first in PACKAGES. Whether it
  1187. # actually picks up any source files is controlled by
  1188. # PACKAGE_DEBUG_SPLIT_STYLE.
  1189. elif pkg.endswith("-src"):
  1190. package_dict[pkg] = (10, i)
  1191. elif autodebug and pkg.endswith("-dbg"):
  1192. package_dict[pkg] = (30, i)
  1193. else:
  1194. package_dict[pkg] = (50, i)
  1195. packages = sorted(package_dict.keys(), key=package_dict.get)
  1196. d.setVar('PACKAGES', ' '.join(packages))
  1197. pkgdest = d.getVar('PKGDEST')
  1198. seen = []
  1199. # os.mkdir masks the permissions with umask so we have to unset it first
  1200. oldumask = os.umask(0)
  1201. debug = []
  1202. for root, dirs, files in cpath.walk(dvar):
  1203. dir = root[len(dvar):]
  1204. if not dir:
  1205. dir = os.sep
  1206. for f in (files + dirs):
  1207. path = "." + os.path.join(dir, f)
  1208. if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
  1209. debug.append(path)
  1210. for pkg in packages:
  1211. root = os.path.join(pkgdest, pkg)
  1212. bb.utils.mkdirhier(root)
  1213. filesvar = d.getVar('FILES:%s' % pkg) or ""
  1214. if "//" in filesvar:
  1215. msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
  1216. oe.qa.handle_error("files-invalid", msg, d)
1217. filesvar = filesvar.replace("//", "/")
  1218. origfiles = filesvar.split()
  1219. files, symlink_paths = files_from_filevars(origfiles)
  1220. if autodebug and pkg.endswith("-dbg"):
  1221. files.extend(debug)
  1222. for file in files:
  1223. if (not cpath.islink(file)) and (not cpath.exists(file)):
  1224. continue
  1225. if file in seen:
  1226. continue
  1227. seen.append(file)
  1228. def mkdir(src, dest, p):
  1229. src = os.path.join(src, p)
  1230. dest = os.path.join(dest, p)
  1231. fstat = cpath.stat(src)
  1232. os.mkdir(dest)
  1233. os.chmod(dest, fstat.st_mode)
  1234. os.chown(dest, fstat.st_uid, fstat.st_gid)
  1235. if p not in seen:
  1236. seen.append(p)
  1237. cpath.updatecache(dest)
  1238. def mkdir_recurse(src, dest, paths):
  1239. if cpath.exists(dest + '/' + paths):
  1240. return
  1241. while paths.startswith("./"):
  1242. paths = paths[2:]
  1243. p = "."
  1244. for c in paths.split("/"):
  1245. p = os.path.join(p, c)
  1246. if not cpath.exists(os.path.join(dest, p)):
  1247. mkdir(src, dest, p)
  1248. if cpath.isdir(file) and not cpath.islink(file):
  1249. mkdir_recurse(dvar, root, file)
  1250. continue
  1251. mkdir_recurse(dvar, root, os.path.dirname(file))
  1252. fpath = os.path.join(root,file)
  1253. if not cpath.islink(file):
  1254. if not os.path.exists(fpath):
  1255. os.link(file, fpath)
  1256. continue
  1257. ret = bb.utils.copyfile(file, fpath)
  1258. if ret is False or ret == 0:
  1259. bb.fatal("File population failed")
  1260. # Check if symlink paths exist
  1261. for file in symlink_paths:
  1262. if not os.path.exists(os.path.join(root,file)):
  1263. bb.fatal("File '%s' cannot be packaged into '%s' because its "
  1264. "parent directory structure does not exist. One of "
  1265. "its parent directories is a symlink whose target "
  1266. "directory is not included in the package." %
  1267. (file, pkg))
  1268. os.umask(oldumask)
  1269. os.chdir(workdir)
  1270. # Handle excluding packages with incompatible licenses
  1271. package_list = []
  1272. for pkg in packages:
  1273. licenses = d.getVar('_exclude_incompatible-' + pkg)
  1274. if licenses:
  1275. msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
  1276. oe.qa.handle_error("incompatible-license", msg, d)
  1277. else:
  1278. package_list.append(pkg)
  1279. d.setVar('PACKAGES', ' '.join(package_list))
  1280. unshipped = []
  1281. for root, dirs, files in cpath.walk(dvar):
  1282. dir = root[len(dvar):]
  1283. if not dir:
  1284. dir = os.sep
  1285. for f in (files + dirs):
  1286. path = os.path.join(dir, f)
  1287. if ('.' + path) not in seen:
  1288. unshipped.append(path)
  1289. if unshipped != []:
  1290. msg = pn + ": Files/directories were installed but not shipped in any package:"
  1291. if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
  1292. bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
  1293. else:
  1294. for f in unshipped:
  1295. msg = msg + "\n " + f
1296. msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively, if they are unneeded, avoid installing them or delete them within do_install.\n"
  1297. msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
  1298. oe.qa.handle_error("installed-vs-shipped", msg, d)
  1299. }
  1300. populate_packages[dirs] = "${D}"
  1301. python package_fixsymlinks () {
  1302. import errno
  1303. pkgdest = d.getVar('PKGDEST')
  1304. packages = d.getVar("PACKAGES", False).split()
  1305. dangling_links = {}
  1306. pkg_files = {}
  1307. for pkg in packages:
  1308. dangling_links[pkg] = []
  1309. pkg_files[pkg] = []
  1310. inst_root = os.path.join(pkgdest, pkg)
  1311. for path in pkgfiles[pkg]:
  1312. rpath = path[len(inst_root):]
  1313. pkg_files[pkg].append(rpath)
  1314. rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
  1315. if not cpath.lexists(rtarget):
  1316. dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
  1317. newrdepends = {}
  1318. for pkg in dangling_links:
  1319. for l in dangling_links[pkg]:
  1320. found = False
  1321. bb.debug(1, "%s contains dangling link %s" % (pkg, l))
  1322. for p in packages:
  1323. if l in pkg_files[p]:
  1324. found = True
  1325. bb.debug(1, "target found in %s" % p)
  1326. if p == pkg:
  1327. break
  1328. if pkg not in newrdepends:
  1329. newrdepends[pkg] = []
  1330. newrdepends[pkg].append(p)
  1331. break
1332. if not found:
  1333. bb.note("%s contains dangling symlink to %s" % (pkg, l))
  1334. for pkg in newrdepends:
  1335. rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
  1336. for p in newrdepends[pkg]:
  1337. if p not in rdepends:
  1338. rdepends[p] = []
  1339. d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
  1340. }
  1341. python package_package_name_hook() {
  1342. """
  1343. A package_name_hook function can be used to rewrite the package names by
  1344. changing PKG. For an example, see debian.bbclass.
  1345. """
  1346. pass
  1347. }
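# A hypothetical override might look like the sketch below (illustrative only; see
# debian.bbclass for the real renaming logic):
#   python package_name_hook () {
#       for pkg in (d.getVar('PACKAGES') or "").split():
#           d.setVar('PKG:' + pkg, 'acme-' + pkg)
#   }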
  1348. EXPORT_FUNCTIONS package_name_hook
  1349. PKGDESTWORK = "${WORKDIR}/pkgdata"
  1350. PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS RPROVIDES RRECOMMENDS RSUGGESTS RREPLACES RCONFLICTS SECTION PKG ALLOW_EMPTY FILES CONFFILES FILES_INFO PACKAGE_ADD_METADATA pkg_postinst pkg_postrm pkg_preinst pkg_prerm"
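# emit_pkgdata below writes ${PKGDESTWORK}/runtime/<pkg> with one "VAR:pkg: value" line
# for each of the variables above that is set, plus a PKGSIZE line, e.g. (illustrative
# content for a hypothetical package "foo"):
#   PKG:foo: foo
#   FILES_INFO:foo: {"/usr/bin/foo": 13240}
#   PKGSIZE:foo: 13240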
  1351. python emit_pkgdata() {
  1352. from glob import glob
  1353. import json
  1354. import bb.compress.zstd
  1355. def process_postinst_on_target(pkg, mlprefix):
  1356. pkgval = d.getVar('PKG:%s' % pkg)
  1357. if pkgval is None:
  1358. pkgval = pkg
  1359. defer_fragment = """
  1360. if [ -n "$D" ]; then
  1361. $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
  1362. exit 0
  1363. fi
  1364. """ % (pkgval, mlprefix)
  1365. postinst = d.getVar('pkg_postinst:%s' % pkg)
  1366. postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)
  1367. if postinst_ontarget:
  1368. bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
  1369. if not postinst:
  1370. postinst = '#!/bin/sh\n'
  1371. postinst += defer_fragment
  1372. postinst += postinst_ontarget
  1373. d.setVar('pkg_postinst:%s' % pkg, postinst)
  1374. def add_set_e_to_scriptlets(pkg):
  1375. for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
  1376. scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
  1377. if scriptlet:
  1378. scriptlet_split = scriptlet.split('\n')
  1379. if scriptlet_split[0].startswith("#!"):
  1380. scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
  1381. else:
  1382. scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
  1383. d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)
  1384. def write_if_exists(f, pkg, var):
  1385. def encode(str):
  1386. import codecs
  1387. c = codecs.getencoder("unicode_escape")
  1388. return c(str)[0].decode("latin1")
  1389. val = d.getVar('%s:%s' % (var, pkg))
  1390. if val:
  1391. f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
  1392. return val
  1393. val = d.getVar('%s' % (var))
  1394. if val:
  1395. f.write('%s: %s\n' % (var, encode(val)))
  1396. return val
  1397. def write_extra_pkgs(variants, pn, packages, pkgdatadir):
  1398. for variant in variants:
  1399. with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
  1400. fd.write("PACKAGES: %s\n" % ' '.join(
  1401. map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))
  1402. def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
  1403. for variant in variants:
  1404. for pkg in packages.split():
  1405. ml_pkg = "%s-%s" % (variant, pkg)
  1406. subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
  1407. with open(subdata_file, 'w') as fd:
  1408. fd.write("PKG:%s: %s" % (ml_pkg, pkg))
  1409. packages = d.getVar('PACKAGES')
  1410. pkgdest = d.getVar('PKGDEST')
  1411. pkgdatadir = d.getVar('PKGDESTWORK')
  1412. data_file = pkgdatadir + d.expand("/${PN}")
  1413. with open(data_file, 'w') as fd:
  1414. fd.write("PACKAGES: %s\n" % packages)
  1415. pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []
  1416. pn = d.getVar('PN')
  1417. global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
  1418. variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
  1419. if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
  1420. write_extra_pkgs(variants, pn, packages, pkgdatadir)
  1421. if bb.data.inherits_class('allarch', d) and not variants \
  1422. and not bb.data.inherits_class('packagegroup', d):
  1423. write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
  1424. workdir = d.getVar('WORKDIR')
  1425. for pkg in packages.split():
  1426. pkgval = d.getVar('PKG:%s' % pkg)
  1427. if pkgval is None:
  1428. pkgval = pkg
  1429. d.setVar('PKG:%s' % pkg, pkg)
  1430. extended_data = {
  1431. "files_info": {}
  1432. }
  1433. pkgdestpkg = os.path.join(pkgdest, pkg)
  1434. files = {}
  1435. files_extra = {}
  1436. total_size = 0
  1437. seen = set()
  1438. for f in pkgfiles[pkg]:
  1439. fpath = os.sep + os.path.relpath(f, pkgdestpkg)
  1440. fstat = os.lstat(f)
  1441. files[fpath] = fstat.st_size
  1442. extended_data["files_info"].setdefault(fpath, {})
  1443. extended_data["files_info"][fpath]['size'] = fstat.st_size
  1444. if fstat.st_ino not in seen:
  1445. seen.add(fstat.st_ino)
  1446. total_size += fstat.st_size
  1447. if fpath in pkgdebugsource:
  1448. extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
  1449. del pkgdebugsource[fpath]
  1450. d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))
  1451. process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
  1452. add_set_e_to_scriptlets(pkg)
  1453. subdata_file = pkgdatadir + "/runtime/%s" % pkg
  1454. with open(subdata_file, 'w') as sf:
  1455. for var in (d.getVar('PKGDATA_VARS') or "").split():
  1456. val = write_if_exists(sf, pkg, var)
  1457. write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
  1458. for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
  1459. write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)
  1460. write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
  1461. for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
  1462. write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)
  1463. sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))
  1464. subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
  1465. num_threads = int(d.getVar("BB_NUMBER_THREADS"))
  1466. with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
  1467. json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))
  1468. # Symlinks needed for rprovides lookup
  1469. rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
  1470. if rprov:
  1471. for p in bb.utils.explode_deps(rprov):
  1472. subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
  1473. bb.utils.mkdirhier(os.path.dirname(subdata_sym))
  1474. oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)
  1475. allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
  1476. if not allow_empty:
  1477. allow_empty = d.getVar('ALLOW_EMPTY')
  1478. root = "%s/%s" % (pkgdest, pkg)
  1479. os.chdir(root)
  1480. g = glob('*')
  1481. if g or allow_empty == "1":
  1482. # Symlinks needed for reverse lookups (from the final package name)
  1483. subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
  1484. oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)
  1485. packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
  1486. open(packagedfile, 'w').close()
  1487. if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
  1488. write_extra_runtime_pkgs(variants, packages, pkgdatadir)
  1489. if bb.data.inherits_class('allarch', d) and not variants \
  1490. and not bb.data.inherits_class('packagegroup', d):
  1491. write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
  1492. }
  1493. emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended"
  1494. emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS"
  1495. ldconfig_postinst_fragment() {
  1496. if [ x"$D" = "x" ]; then
  1497. if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
  1498. fi
  1499. }
  1500. RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'"
  1501. # Collect perfile run-time dependency metadata
  1502. # Output:
  1503. # FILERPROVIDESFLIST:pkg - list of all files w/ deps
  1504. # FILERPROVIDES:filepath:pkg - per file dep
  1505. #
  1506. # FILERDEPENDSFLIST:pkg - list of all files w/ deps
  1507. # FILERDEPENDS:filepath:pkg - per file dep
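# Illustrative example for a hypothetical package "foo" shipping /usr/bin/foo (the
# dependency strings are whatever rpmdeps reports for that file):
#   FILERDEPENDSFLIST:foo = "/usr/bin/foo"
#   FILERDEPENDS:/usr/bin/foo:foo = "libc.so.6()(64bit)"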
  1508. python package_do_filedeps() {
  1509. if d.getVar('SKIP_FILEDEPS') == '1':
  1510. return
  1511. pkgdest = d.getVar('PKGDEST')
  1512. packages = d.getVar('PACKAGES')
  1513. rpmdeps = d.getVar('RPMDEPS')
  1514. def chunks(files, n):
  1515. return [files[i:i+n] for i in range(0, len(files), n)]
  1516. pkglist = []
  1517. for pkg in packages.split():
  1518. if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
  1519. continue
  1520. if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
  1521. continue
  1522. for files in chunks(pkgfiles[pkg], 100):
  1523. pkglist.append((pkg, files, rpmdeps, pkgdest))
  1524. processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)
  1525. provides_files = {}
  1526. requires_files = {}
  1527. for result in processed:
  1528. (pkg, provides, requires) = result
  1529. if pkg not in provides_files:
  1530. provides_files[pkg] = []
  1531. if pkg not in requires_files:
  1532. requires_files[pkg] = []
  1533. for file in sorted(provides):
  1534. provides_files[pkg].append(file)
  1535. key = "FILERPROVIDES:" + file + ":" + pkg
  1536. d.appendVar(key, " " + " ".join(provides[file]))
  1537. for file in sorted(requires):
  1538. requires_files[pkg].append(file)
  1539. key = "FILERDEPENDS:" + file + ":" + pkg
  1540. d.appendVar(key, " " + " ".join(requires[file]))
  1541. for pkg in requires_files:
  1542. d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
  1543. for pkg in provides_files:
  1544. d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
  1545. }
  1546. SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
  1547. SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
  1548. python package_do_shlibs() {
  1549. import itertools
  1550. import re, pipes
  1551. import subprocess
  1552. exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
  1553. if exclude_shlibs:
  1554. bb.note("not generating shlibs")
  1555. return
  1556. lib_re = re.compile(r"^.*\.so")
  1557. libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))
  1558. packages = d.getVar('PACKAGES')
  1559. shlib_pkgs = []
  1560. exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
  1561. if exclusion_list:
  1562. for pkg in packages.split():
  1563. if pkg not in exclusion_list.split():
  1564. shlib_pkgs.append(pkg)
  1565. else:
  1566. bb.note("not generating shlibs for %s" % pkg)
  1567. else:
  1568. shlib_pkgs = packages.split()
  1569. hostos = d.getVar('HOST_OS')
  1570. workdir = d.getVar('WORKDIR')
  1571. ver = d.getVar('PKGV')
  1572. if not ver:
  1573. msg = "PKGV not defined"
  1574. oe.qa.handle_error("pkgv-undefined", msg, d)
  1575. return
  1576. pkgdest = d.getVar('PKGDEST')
  1577. shlibswork_dir = d.getVar('SHLIBSWORKDIR')
  1578. def linux_so(file, pkg, pkgver, d):
  1579. needs_ldconfig = False
  1580. needed = set()
  1581. sonames = set()
  1582. renames = []
  1583. ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
  1584. cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
  1585. fd = os.popen(cmd)
  1586. lines = fd.readlines()
  1587. fd.close()
  1588. rpath = tuple()
  1589. for l in lines:
  1590. m = re.match(r"\s+RPATH\s+([^\s]*)", l)
  1591. if m:
  1592. rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
  1593. rpath = tuple(map(os.path.normpath, rpaths))
  1594. for l in lines:
  1595. m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
  1596. if m:
  1597. dep = m.group(1)
  1598. if dep not in needed:
  1599. needed.add((dep, file, rpath))
  1600. m = re.match(r"\s+SONAME\s+([^\s]*)", l)
  1601. if m:
  1602. this_soname = m.group(1)
  1603. prov = (this_soname, ldir, pkgver)
  1604. if not prov in sonames:
1605. # If the library is private (only used by this package), do not register it as a shlib provider
  1606. import fnmatch
  1607. if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
  1608. sonames.add(prov)
  1609. if libdir_re.match(os.path.dirname(file)):
  1610. needs_ldconfig = True
  1611. if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
  1612. renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
  1613. return (needs_ldconfig, needed, sonames, renames)
  1614. def darwin_so(file, needed, sonames, renames, pkgver):
  1615. if not os.path.exists(file):
  1616. return
  1617. ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
  1618. def get_combinations(base):
  1619. #
  1620. # Given a base library name, find all combinations of this split by "." and "-"
  1621. #
  1622. combos = []
  1623. options = base.split(".")
  1624. for i in range(1, len(options) + 1):
  1625. combos.append(".".join(options[0:i]))
  1626. options = base.split("-")
  1627. for i in range(1, len(options) + 1):
  1628. combos.append("-".join(options[0:i]))
  1629. return combos
  1630. if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
  1631. # Drop suffix
  1632. name = os.path.basename(file).rsplit(".",1)[0]
  1633. # Find all combinations
  1634. combos = get_combinations(name)
  1635. for combo in combos:
  1636. if not combo in sonames:
  1637. prov = (combo, ldir, pkgver)
  1638. sonames.add(prov)
  1639. if file.endswith('.dylib') or file.endswith('.so'):
  1640. rpath = []
  1641. p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  1642. out, err = p.communicate()
  1643. # If returned successfully, process stdout for results
  1644. if p.returncode == 0:
1645. for l in out.decode().split("\n"):
  1646. l = l.strip()
  1647. if l.startswith('path '):
  1648. rpath.append(l.split()[1])
  1649. p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  1650. out, err = p.communicate()
  1651. # If returned successfully, process stdout for results
  1652. if p.returncode == 0:
1653. for l in out.decode().split("\n"):
  1654. l = l.strip()
  1655. if not l or l.endswith(":"):
  1656. continue
  1657. if "is not an object file" in l:
  1658. continue
  1659. name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
  1660. if name and name not in needed[pkg]:
  1661. needed[pkg].add((name, file, tuple()))
  1662. def mingw_dll(file, needed, sonames, renames, pkgver):
  1663. if not os.path.exists(file):
  1664. return
  1665. if file.endswith(".dll"):
  1666. # assume all dlls are shared objects provided by the package
  1667. sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))
  1668. if (file.endswith(".dll") or file.endswith(".exe")):
  1669. # use objdump to search for "DLL Name: .*\.dll"
  1670. p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  1671. out, err = p.communicate()
  1672. # process the output, grabbing all .dll names
  1673. if p.returncode == 0:
  1674. for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
  1675. dllname = m.group(1)
  1676. if dllname:
  1677. needed[pkg].add((dllname, file, tuple()))
  1678. if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
  1679. snap_symlinks = True
  1680. else:
  1681. snap_symlinks = False
  1682. needed = {}
  1683. shlib_provider = oe.package.read_shlib_providers(d)
  1684. for pkg in shlib_pkgs:
  1685. private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
  1686. private_libs = private_libs.split()
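# PRIVATE_LIBS entries are matched against sonames with fnmatch, so glob-style patterns
# work here, e.g. (hypothetical) PRIVATE_LIBS:${PN} = "libinternal.so.*". Matching
# sonames are neither registered as shlib providers nor turned into dependencies below.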
  1687. needs_ldconfig = False
  1688. bb.debug(2, "calculating shlib provides for %s" % pkg)
  1689. pkgver = d.getVar('PKGV:' + pkg)
  1690. if not pkgver:
1691. pkgver = d.getVar('PV:' + pkg)
  1692. if not pkgver:
  1693. pkgver = ver
  1694. needed[pkg] = set()
  1695. sonames = set()
  1696. renames = []
  1697. linuxlist = []
  1698. for file in pkgfiles[pkg]:
  1699. soname = None
  1700. if cpath.islink(file):
  1701. continue
  1702. if hostos == "darwin" or hostos == "darwin8":
  1703. darwin_so(file, needed, sonames, renames, pkgver)
  1704. elif hostos.startswith("mingw"):
  1705. mingw_dll(file, needed, sonames, renames, pkgver)
  1706. elif os.access(file, os.X_OK) or lib_re.match(file):
  1707. linuxlist.append(file)
  1708. if linuxlist:
  1709. results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
  1710. for r in results:
  1711. ldconfig = r[0]
  1712. needed[pkg] |= r[1]
  1713. sonames |= r[2]
  1714. renames.extend(r[3])
  1715. needs_ldconfig = needs_ldconfig or ldconfig
  1716. for (old, new) in renames:
  1717. bb.note("Renaming %s to %s" % (old, new))
  1718. bb.utils.rename(old, new)
  1719. pkgfiles[pkg].remove(old)
  1720. shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
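# Each line of the .list file records "<soname>:<directory within the package>:<version>",
# e.g. (illustrative) "libfoo.so.1:/usr/lib:1.2.3".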
  1721. if len(sonames):
  1722. with open(shlibs_file, 'w') as fd:
  1723. for s in sorted(sonames):
  1724. if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
  1725. (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
  1726. if old_pkg != pkg:
  1727. bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
  1728. bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
  1729. fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
  1730. if s[0] not in shlib_provider:
  1731. shlib_provider[s[0]] = {}
  1732. shlib_provider[s[0]][s[1]] = (pkg, pkgver)
  1733. if needs_ldconfig:
  1734. bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
  1735. postinst = d.getVar('pkg_postinst:%s' % pkg)
  1736. if not postinst:
  1737. postinst = '#!/bin/sh\n'
  1738. postinst += d.getVar('ldconfig_postinst_fragment')
  1739. d.setVar('pkg_postinst:%s' % pkg, postinst)
  1740. bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
  1741. assumed_libs = d.getVar('ASSUME_SHLIBS')
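# ASSUME_SHLIBS entries take the form "<soname>:<package>[_<version>]", e.g.
# (illustrative) ASSUME_SHLIBS = "libEGL.so.1:libegl-implementation_0.1" registers
# libegl-implementation (version 0.1) as the provider of libEGL.so.1 in ${libdir}.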
  1742. if assumed_libs:
  1743. libdir = d.getVar("libdir")
  1744. for e in assumed_libs.split():
  1745. l, dep_pkg = e.split(":")
  1746. lib_ver = None
  1747. dep_pkg = dep_pkg.rsplit("_", 1)
  1748. if len(dep_pkg) == 2:
  1749. lib_ver = dep_pkg[1]
  1750. dep_pkg = dep_pkg[0]
  1751. if l not in shlib_provider:
  1752. shlib_provider[l] = {}
  1753. shlib_provider[l][libdir] = (dep_pkg, lib_ver)
  1754. libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
  1755. for pkg in shlib_pkgs:
  1756. bb.debug(2, "calculating shlib requirements for %s" % pkg)
  1757. private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
  1758. private_libs = private_libs.split()
  1759. deps = list()
  1760. for n in needed[pkg]:
1761. # If n is in the private libraries, don't try to search for a provider for it.
1762. # This could cause a problem when some abc.bb provides a private
1763. # /opt/abc/lib/libfoo.so.1 and also contains /usr/bin/abc, which depends on the system library libfoo.so.1,
1764. # but skipping it is still a better alternative than providing our own
1765. # version and then adding a runtime dependency on the same system library.
  1766. import fnmatch
  1767. if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
  1768. bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
  1769. continue
  1770. if n[0] in shlib_provider.keys():
  1771. shlib_provider_map = shlib_provider[n[0]]
  1772. matches = set()
  1773. for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
  1774. if p in shlib_provider_map:
  1775. matches.add(p)
  1776. if len(matches) > 1:
  1777. matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
  1778. bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
  1779. elif len(matches) == 1:
  1780. (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]
  1781. bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
  1782. if dep_pkg == pkg:
  1783. continue
  1784. if ver_needed:
  1785. dep = "%s (>= %s)" % (dep_pkg, ver_needed)
  1786. else:
  1787. dep = dep_pkg
  1788. if not dep in deps:
  1789. deps.append(dep)
  1790. continue
  1791. bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
  1792. deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
  1793. if os.path.exists(deps_file):
  1794. os.remove(deps_file)
  1795. if deps:
  1796. with open(deps_file, 'w') as fd:
  1797. for dep in sorted(deps):
  1798. fd.write(dep + '\n')
  1799. }
  1800. python package_do_pkgconfig () {
  1801. import re
  1802. packages = d.getVar('PACKAGES')
  1803. workdir = d.getVar('WORKDIR')
  1804. pkgdest = d.getVar('PKGDEST')
  1805. shlibs_dirs = d.getVar('SHLIBSDIRS').split()
  1806. shlibswork_dir = d.getVar('SHLIBSWORKDIR')
  1807. pc_re = re.compile(r'(.*)\.pc$')
  1808. var_re = re.compile(r'(.*)=(.*)')
  1809. field_re = re.compile(r'(.*): (.*)')
  1810. pkgconfig_provided = {}
  1811. pkgconfig_needed = {}
  1812. for pkg in packages.split():
  1813. pkgconfig_provided[pkg] = []
  1814. pkgconfig_needed[pkg] = []
  1815. for file in sorted(pkgfiles[pkg]):
  1816. m = pc_re.match(file)
  1817. if m:
  1818. pd = bb.data.init()
  1819. name = m.group(1)
  1820. pkgconfig_provided[pkg].append(os.path.basename(name))
  1821. if not os.access(file, os.R_OK):
  1822. continue
  1823. with open(file, 'r') as f:
  1824. lines = f.readlines()
  1825. for l in lines:
  1826. m = var_re.match(l)
  1827. if m:
  1828. name = m.group(1)
  1829. val = m.group(2)
  1830. pd.setVar(name, pd.expand(val))
  1831. continue
  1832. m = field_re.match(l)
  1833. if m:
  1834. hdr = m.group(1)
  1835. exp = pd.expand(m.group(2))
  1836. if hdr == 'Requires':
  1837. pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
  1838. for pkg in packages.split():
  1839. pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
  1840. if pkgconfig_provided[pkg] != []:
  1841. with open(pkgs_file, 'w') as f:
  1842. for p in sorted(pkgconfig_provided[pkg]):
  1843. f.write('%s\n' % p)
  1844. # Go from least to most specific since the last one found wins
  1845. for dir in reversed(shlibs_dirs):
  1846. if not os.path.exists(dir):
  1847. continue
  1848. for file in sorted(os.listdir(dir)):
  1849. m = re.match(r'^(.*)\.pclist$', file)
  1850. if m:
  1851. pkg = m.group(1)
  1852. with open(os.path.join(dir, file)) as fd:
  1853. lines = fd.readlines()
  1854. pkgconfig_provided[pkg] = []
  1855. for l in lines:
  1856. pkgconfig_provided[pkg].append(l.rstrip())
  1857. for pkg in packages.split():
  1858. deps = []
  1859. for n in pkgconfig_needed[pkg]:
  1860. found = False
  1861. for k in pkgconfig_provided.keys():
  1862. if n in pkgconfig_provided[k]:
  1863. if k != pkg and not (k in deps):
  1864. deps.append(k)
  1865. found = True
1866. if not found:
  1867. bb.note("couldn't find pkgconfig module '%s' in any package" % n)
  1868. deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
  1869. if len(deps):
  1870. with open(deps_file, 'w') as fd:
  1871. for dep in deps:
  1872. fd.write(dep + '\n')
  1873. }
  1874. def read_libdep_files(d):
  1875. pkglibdeps = {}
  1876. packages = d.getVar('PACKAGES').split()
  1877. for pkg in packages:
  1878. pkglibdeps[pkg] = {}
  1879. for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
  1880. depsfile = d.expand("${PKGDEST}/" + pkg + extension)
  1881. if os.access(depsfile, os.R_OK):
  1882. with open(depsfile) as fd:
  1883. lines = fd.readlines()
  1884. for l in lines:
1885. l = l.rstrip()
  1886. deps = bb.utils.explode_dep_versions2(l)
  1887. for dep in deps:
  1888. if not dep in pkglibdeps[pkg]:
  1889. pkglibdeps[pkg][dep] = deps[dep]
  1890. return pkglibdeps
  1891. python read_shlibdeps () {
  1892. pkglibdeps = read_libdep_files(d)
  1893. packages = d.getVar('PACKAGES').split()
  1894. for pkg in packages:
  1895. rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
  1896. for dep in sorted(pkglibdeps[pkg]):
  1897. # Add the dep if it's not already there, or if no comparison is set
  1898. if dep not in rdepends:
  1899. rdepends[dep] = []
  1900. for v in pkglibdeps[pkg][dep]:
  1901. if v not in rdepends[dep]:
  1902. rdepends[dep].append(v)
  1903. d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
  1904. }
  1905. python package_depchains() {
  1906. """
  1907. For a given set of prefix and postfix modifiers, make those packages
  1908. RRECOMMENDS on the corresponding packages for its RDEPENDS.
  1909. Example: If package A depends upon package B, and A's .bb emits an
  1910. A-dev package, this would make A-dev Recommends: B-dev.
  1911. If only one of a given suffix is specified, it will take the RRECOMMENDS
  1912. based on the RDEPENDS of *all* other packages. If more than one of a given
1913. suffix is specified, it will only use the RDEPENDS of the single parent
  1914. package.
  1915. """
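# Illustrative example: with "-dev" in DEPCHAIN_POST, a recipe with DEPENDS = "bar" that
# produces "foo" and "foo-dev" ends up with "bar-dev" added to RRECOMMENDS:foo-dev
# (package names here are hypothetical).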
  1916. packages = d.getVar('PACKAGES')
  1917. postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
  1918. prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
  1919. def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
  1920. #bb.note('depends for %s is %s' % (base, depends))
  1921. rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
  1922. for depend in sorted(depends):
  1923. if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
  1924. #bb.note("Skipping %s" % depend)
  1925. continue
  1926. if depend.endswith('-dev'):
  1927. depend = depend[:-4]
  1928. if depend.endswith('-dbg'):
  1929. depend = depend[:-4]
  1930. pkgname = getname(depend, suffix)
  1931. #bb.note("Adding %s for %s" % (pkgname, depend))
  1932. if pkgname not in rreclist and pkgname != pkg:
  1933. rreclist[pkgname] = []
  1934. #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
  1935. d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
  1936. def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
  1937. #bb.note('rdepends for %s is %s' % (base, rdepends))
  1938. rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
  1939. for depend in sorted(rdepends):
  1940. if depend.find('virtual-locale-') != -1:
  1941. #bb.note("Skipping %s" % depend)
  1942. continue
  1943. if depend.endswith('-dev'):
  1944. depend = depend[:-4]
  1945. if depend.endswith('-dbg'):
  1946. depend = depend[:-4]
  1947. pkgname = getname(depend, suffix)
  1948. #bb.note("Adding %s for %s" % (pkgname, depend))
  1949. if pkgname not in rreclist and pkgname != pkg:
  1950. rreclist[pkgname] = []
  1951. #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
  1952. d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
  1953. def add_dep(list, dep):
  1954. if dep not in list:
  1955. list.append(dep)
  1956. depends = []
  1957. for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
  1958. add_dep(depends, dep)
  1959. rdepends = []
  1960. for pkg in packages.split():
  1961. for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
  1962. add_dep(rdepends, dep)
  1963. #bb.note('rdepends is %s' % rdepends)
  1964. def post_getname(name, suffix):
  1965. return '%s%s' % (name, suffix)
  1966. def pre_getname(name, suffix):
  1967. return '%s%s' % (suffix, name)
  1968. pkgs = {}
  1969. for pkg in packages.split():
  1970. for postfix in postfixes:
  1971. if pkg.endswith(postfix):
  1972. if not postfix in pkgs:
  1973. pkgs[postfix] = {}
  1974. pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
  1975. for prefix in prefixes:
  1976. if pkg.startswith(prefix):
  1977. if not prefix in pkgs:
  1978. pkgs[prefix] = {}
  1979. pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
  1980. if "-dbg" in pkgs:
  1981. pkglibdeps = read_libdep_files(d)
  1982. pkglibdeplist = []
  1983. for pkg in pkglibdeps:
  1984. for k in pkglibdeps[pkg]:
  1985. add_dep(pkglibdeplist, k)
  1986. dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
  1987. for suffix in pkgs:
  1988. for pkg in pkgs[suffix]:
  1989. if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
  1990. continue
  1991. (base, func) = pkgs[suffix][pkg]
  1992. if suffix == "-dev":
  1993. pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
  1994. elif suffix == "-dbg":
  1995. if not dbgdefaultdeps:
  1996. pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
  1997. continue
  1998. if len(pkgs[suffix]) == 1:
  1999. pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
  2000. else:
  2001. rdeps = []
  2002. for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
  2003. add_dep(rdeps, dep)
  2004. pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
  2005. }
  2006. # Since bitbake can't determine which variables are accessed during package
  2007. # iteration, we need to list them here:
  2008. PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS PACKAGE_ADD_METADATA"
  2009. def gen_packagevar(d, pkgvars="PACKAGEVARS"):
  2010. ret = []
  2011. pkgs = (d.getVar("PACKAGES") or "").split()
  2012. vars = (d.getVar(pkgvars) or "").split()
  2013. for v in vars:
  2014. ret.append(v)
  2015. for p in pkgs:
  2016. for v in vars:
  2017. ret.append(v + ":" + p)
  2018. # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
  2019. # affected recipes.
  2020. ret.append('_exclude_incompatible-%s' % p)
  2021. return " ".join(ret)
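# Illustrative example: for a hypothetical recipe with PACKAGES = "foo foo-dev", the
# returned string contains "FILES RDEPENDS ... FILES:foo RDEPENDS:foo ...
# _exclude_incompatible-foo FILES:foo-dev ... _exclude_incompatible-foo-dev", which is
# what feeds do_package[vardeps] below.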
  2022. PACKAGE_PREPROCESS_FUNCS ?= ""
  2023. # Functions for setting up PKGD
  2024. PACKAGEBUILDPKGD ?= " \
  2025. package_prepare_pkgdata \
  2026. perform_packagecopy \
  2027. ${PACKAGE_PREPROCESS_FUNCS} \
  2028. split_and_strip_files \
  2029. fixup_perms \
  2030. "
  2031. # Functions which split PKGD up into separate packages
  2032. PACKAGESPLITFUNCS ?= " \
  2033. package_do_split_locales \
  2034. populate_packages"
  2035. # Functions which process metadata based on split packages
  2036. PACKAGEFUNCS += " \
  2037. package_fixsymlinks \
  2038. package_name_hook \
  2039. package_do_filedeps \
  2040. package_do_shlibs \
  2041. package_do_pkgconfig \
  2042. read_shlibdeps \
  2043. package_depchains \
  2044. emit_pkgdata"
  2045. python do_package () {
  2046. # Change the following version to cause sstate to invalidate the package
2047. # cache. This is useful if an item this class depends on changes in a
2048. # way that alters the output of this class. rpmdeps is a good example,
2049. # as any change to rpmdeps requires this to be rerun.
  2050. # PACKAGE_BBCLASS_VERSION = "4"
  2051. # Init cachedpath
  2052. global cpath
  2053. cpath = oe.cachedpath.CachedPath()
  2054. ###########################################################################
  2055. # Sanity test the setup
  2056. ###########################################################################
  2057. packages = (d.getVar('PACKAGES') or "").split()
  2058. if len(packages) < 1:
  2059. bb.debug(1, "No packages to build, skipping do_package")
  2060. return
  2061. workdir = d.getVar('WORKDIR')
  2062. outdir = d.getVar('DEPLOY_DIR')
  2063. dest = d.getVar('D')
  2064. dvar = d.getVar('PKGD')
  2065. pn = d.getVar('PN')
  2066. if not workdir or not outdir or not dest or not dvar or not pn:
  2067. msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
  2068. oe.qa.handle_error("var-undefined", msg, d)
  2069. return
  2070. bb.build.exec_func("package_convert_pr_autoinc", d)
  2071. ###########################################################################
  2072. # Optimisations
  2073. ###########################################################################
  2074. # Continually expanding complex expressions is inefficient, particularly
  2075. # when we write to the datastore and invalidate the expansion cache. This
  2076. # code pre-expands some frequently used variables
  2077. def expandVar(x, d):
  2078. d.setVar(x, d.getVar(x))
  2079. for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
  2080. expandVar(x, d)
  2081. ###########################################################################
  2082. # Setup PKGD (from D)
  2083. ###########################################################################
  2084. for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
  2085. bb.build.exec_func(f, d)
  2086. ###########################################################################
  2087. # Split up PKGD into PKGDEST
  2088. ###########################################################################
  2089. cpath = oe.cachedpath.CachedPath()
  2090. for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
  2091. bb.build.exec_func(f, d)
  2092. ###########################################################################
  2093. # Process PKGDEST
  2094. ###########################################################################
  2095. # Build global list of files in each split package
  2096. global pkgfiles
  2097. pkgfiles = {}
  2098. packages = d.getVar('PACKAGES').split()
  2099. pkgdest = d.getVar('PKGDEST')
  2100. for pkg in packages:
  2101. pkgfiles[pkg] = []
  2102. for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
  2103. for file in files:
  2104. pkgfiles[pkg].append(walkroot + os.sep + file)
  2105. for f in (d.getVar('PACKAGEFUNCS') or '').split():
  2106. bb.build.exec_func(f, d)
  2107. oe.qa.exit_if_errors(d)
  2108. }
  2109. do_package[dirs] = "${SHLIBSWORKDIR} ${D}"
  2110. do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
  2111. addtask package after do_install
  2112. SSTATETASKS += "do_package"
  2113. do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
  2114. do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
  2115. do_package_setscene[dirs] = "${STAGING_DIR}"
  2116. python do_package_setscene () {
  2117. sstate_setscene(d)
  2118. }
  2119. addtask do_package_setscene
2120. # Copy from PKGDESTWORK to a temporary directory, as PKGDESTWORK can be cleaned at both
2121. # do_package_setscene and do_packagedata_setscene, leading to races
  2122. python do_packagedata () {
  2123. bb.build.exec_func("package_get_auto_pr", d)
  2124. src = d.expand("${PKGDESTWORK}")
  2125. dest = d.expand("${WORKDIR}/pkgdata-pdata-input")
  2126. oe.path.copyhardlinktree(src, dest)
  2127. bb.build.exec_func("packagedata_translate_pr_autoinc", d)
  2128. }
  2129. do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input"
  2130. # Translate the EXTENDPRAUTO and AUTOINC to the final values
  2131. packagedata_translate_pr_autoinc() {
  2132. find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \
  2133. sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \
  2134. -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i
  2135. }
  2136. addtask packagedata before do_build after do_package
  2137. SSTATETASKS += "do_packagedata"
  2138. do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input"
  2139. do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
  2140. do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}"
  2141. python do_packagedata_setscene () {
  2142. sstate_setscene(d)
  2143. }
  2144. addtask do_packagedata_setscene
  2145. #
  2146. # Helper functions for the package writing classes
  2147. #
  2148. def mapping_rename_hook(d):
  2149. """
  2150. Rewrite variables to account for package renaming in things
  2151. like debian.bbclass or manual PKG variable name changes
  2152. """
  2153. pkg = d.getVar("PKG")
  2154. runtime_mapping_rename("RDEPENDS", pkg, d)
  2155. runtime_mapping_rename("RRECOMMENDS", pkg, d)
  2156. runtime_mapping_rename("RSUGGESTS", pkg, d)