
  1. """
  2. BitBake 'Fetch' git implementation
  3. git fetcher support the SRC_URI with format of:
  4. SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
  5. Supported SRC_URI options are:
  6. - branch
  7. The git branch to retrieve from. The default is "master"
  8. This option also supports multiple branch fetching, with branches
  9. separated by commas. In multiple branches case, the name option
  10. must have the same number of names to match the branches, which is
  11. used to specify the SRC_REV for the branch
  12. e.g:
  13. SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
  14. SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
  15. SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
  16. - tag
  17. The git tag to retrieve. The default is "master"
  18. - protocol
  19. The method to use to access the repository. Common options are "git",
  20. "http", "https", "file", "ssh" and "rsync". The default is "git".
  21. - rebaseable
  22. rebaseable indicates that the upstream git repo may rebase in the future,
  23. and current revision may disappear from upstream repo. This option will
  24. remind fetcher to preserve local cache carefully for future use.
  25. The default value is "0", set rebaseable=1 for rebaseable git repo.
  26. - nocheckout
  27. Don't checkout source code when unpacking. set this option for the recipe
  28. who has its own routine to checkout code.
  29. The default is "0", set nocheckout=1 if needed.
  30. - bareclone
  31. Create a bare clone of the source code and don't checkout the source code
  32. when unpacking. Set this option for the recipe who has its own routine to
  33. checkout code and tracking branch requirements.
  34. The default is "0", set bareclone=1 if needed.
  35. - nobranch
  36. Don't check the SHA validation for branch. set this option for the recipe
  37. referring to commit which is valid in tag instead of branch.
  38. The default is "0", set nobranch=1 if needed.
  39. - usehead
  40. For local git:// urls to use the current branch HEAD as the revision for use with
  41. AUTOREV. Implies nobranch.
  42. """

# Copyright (C) 2005 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
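
# Illustrative example (not part of the upstream documentation): a recipe and
# its configuration might combine the SRC_URI options documented above with
# the shallow-clone variables read in Git.urldata_init() below. The host,
# path and revision values here are placeholders.
#
#   SRC_URI = "git://some.host/somepath;protocol=https;branch=branchX;name=nameX"
#   SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
#   BB_GIT_SHALLOW = "1"
#   BB_GIT_SHALLOW_DEPTH = "1"
#   BB_GENERATE_SHALLOW_TARBALLS = "1"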

import collections
import errno
import fnmatch
import os
import re
import subprocess
import tempfile
import bb
import bb.progress
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger


class GitProgressHandler(bb.progress.LineFilterProgressHandler):
    """Extract progress information from git output"""
    def __init__(self, d):
        self._buffer = ''
        self._count = 0
        super(GitProgressHandler, self).__init__(d)
        # Send an initial progress event so the bar gets shown
        self._fire_progress(-1)

    def write(self, string):
        self._buffer += string
        stages = ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas']
        stage_weights = [0.2, 0.05, 0.5, 0.25]
        stagenum = 0
        for i, stage in reversed(list(enumerate(stages))):
            if stage in self._buffer:
                stagenum = i
                self._buffer = ''
                break
        self._status = stages[stagenum]
        percs = re.findall(r'(\d+)%', string)
        if percs:
            progress = int(round((int(percs[-1]) * stage_weights[stagenum]) + (sum(stage_weights[:stagenum]) * 100)))
            rates = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
            if rates:
                rate = rates[-1]
            else:
                rate = None
            self.update(progress, rate)
        else:
            if stagenum == 0:
                percs = re.findall(r': (\d+)', string)
                if percs:
                    count = int(percs[-1])
                    if count > self._count:
                        self._count = count
                        self._fire_progress(-count)
        super(GitProgressHandler, self).write(string)
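
# Worked example of the progress calculation above (a sketch derived from
# stage_weights, not additional upstream documentation): a git line reporting
# "Receiving objects:  50%" selects stage index 2, so
# progress = round(50 * 0.5 + (0.2 + 0.05) * 100) = 50.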


class Git(FetchMethod):
    bitbake_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.join(os.path.abspath(__file__))), '..', '..', '..'))
    make_shallow_path = os.path.join(bitbake_dir, 'bin', 'git-make-shallow')

    """Class to fetch a module or modules from git repositories"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def supports_checksum(self, urldata):
        return False

    def urldata_init(self, ud, d):
        """
        init git specific variables within url data
        so that git methods like latest_revision() can work
        """
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "git"

        if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.nocheckout = ud.parm.get("nocheckout", "0") == "1"

        ud.rebaseable = ud.parm.get("rebaseable", "0") == "1"

        ud.nobranch = ud.parm.get("nobranch", "0") == "1"

        # usehead implies nobranch
        ud.usehead = ud.parm.get("usehead", "0") == "1"
        if ud.usehead:
            if ud.proto != "file":
                raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
            ud.nobranch = 1

        # bareclone implies nocheckout
        ud.bareclone = ud.parm.get("bareclone", "0") == "1"
        if ud.bareclone:
            ud.nocheckout = 1

        ud.unresolvedrev = {}
        branches = ud.parm.get("branch", "master").split(',')
        if len(branches) != len(ud.names):
            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)

        ud.cloneflags = "-s -n"
        if ud.bareclone:
            ud.cloneflags += " --mirror"

        ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
        ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()

        depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
        if depth_default is not None:
            try:
                depth_default = int(depth_default or 0)
            except ValueError:
                raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
            else:
                if depth_default < 0:
                    raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
        else:
            depth_default = 1
        ud.shallow_depths = collections.defaultdict(lambda: depth_default)

        revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
        ud.shallow_revs = []
        ud.branches = {}
        for pos, name in enumerate(ud.names):
            branch = branches[pos]
            ud.branches[name] = branch
            ud.unresolvedrev[name] = branch

            shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name)
            if shallow_depth is not None:
                try:
                    shallow_depth = int(shallow_depth or 0)
                except ValueError:
                    raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth))
                else:
                    if shallow_depth < 0:
                        raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth))
                    ud.shallow_depths[name] = shallow_depth

            revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name)
            if revs is not None:
                ud.shallow_revs.extend(revs.split())
            elif revs_default is not None:
                ud.shallow_revs.extend(revs_default.split())

        if (ud.shallow and
                not ud.shallow_revs and
                all(ud.shallow_depths[n] == 0 for n in ud.names)):
            # Shallow disabled for this URL
            ud.shallow = False

        if ud.usehead:
            ud.unresolvedrev['default'] = 'HEAD'

        ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"

        write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
        ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
        ud.write_shallow_tarballs = (d.getVar("BB_GENERATE_SHALLOW_TARBALLS") or write_tarballs) != "0"

        ud.setup_revisions(d)

        for name in ud.names:
            # Ensure anything that doesn't look like a SHA-1 revision
            # (40 hex characters) is translated into one
            if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
                if ud.revisions[name]:
                    ud.unresolvedrev[name] = ud.revisions[name]
                ud.revisions[name] = self.latest_revision(ud, d, name)

        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
        if gitsrcname.startswith('.'):
            gitsrcname = gitsrcname[1:]

        # For a rebaseable git repo, it is necessary to keep a mirror tarball
        # per revision, so that even if the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
        # contain the revision
        if ud.rebaseable:
            for name in ud.names:
                gitsrcname = gitsrcname + '_' + ud.revisions[name]

        dl_dir = d.getVar("DL_DIR")
        gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
        ud.clonedir = os.path.join(gitdir, gitsrcname)
        ud.localfile = ud.clonedir

        mirrortarball = 'git2_%s.tar.gz' % gitsrcname
        ud.fullmirror = os.path.join(dl_dir, mirrortarball)
        ud.mirrortarballs = [mirrortarball]

        if ud.shallow:
            tarballname = gitsrcname
            if ud.bareclone:
                tarballname = "%s_bare" % tarballname

            if ud.shallow_revs:
                tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))

            for name, revision in sorted(ud.revisions.items()):
                tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7])
                depth = ud.shallow_depths[name]
                if depth:
                    tarballname = "%s-%s" % (tarballname, depth)

            shallow_refs = []
            if not ud.nobranch:
                shallow_refs.extend(ud.branches.values())
            if ud.shallow_extra_refs:
                shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
            if shallow_refs:
                tarballname = "%s_%s" % (tarballname, "_".join(sorted(shallow_refs)).replace('/', '.'))

            fetcher = self.__class__.__name__.lower()
            ud.shallowtarball = '%sshallow_%s.tar.gz' % (fetcher, tarballname)
            ud.fullshallow = os.path.join(dl_dir, ud.shallowtarball)
            ud.mirrortarballs.insert(0, ud.shallowtarball)
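
    # Illustrative naming (a sketch based on urldata_init() above, using a
    # placeholder URL): for SRC_URI = "git://some.host/somepath;branch=master"
    # the clone lands in DL_DIR/git2/some.host.somepath and the plain mirror
    # tarball is git2_some.host.somepath.tar.gz; a shallow tarball additionally
    # encodes the short revision, depth and refs, e.g.
    # gitshallow_some.host.somepath_<rev7>-1_master.tar.gz.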

    def localpath(self, ud, d):
        return ud.clonedir

    def need_update(self, ud, d):
        return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud)

    def clonedir_need_update(self, ud, d):
        if not os.path.exists(ud.clonedir):
            return True
        if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d):
            return True
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                return True
        return False

    def clonedir_need_shallow_revs(self, ud, d):
        for rev in ud.shallow_revs:
            try:
                runfetchcmd('%s rev-parse -q --verify %s' % (ud.basecmd, rev), d, quiet=True, workdir=ud.clonedir)
            except bb.fetch2.FetchError:
                return rev
        return None

    def shallow_tarball_need_update(self, ud):
        return ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow)

    def tarball_need_update(self, ud):
        return ud.write_tarballs and not os.path.exists(ud.fullmirror)

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
            return True
        if os.path.exists(ud.clonedir):
            return False
        return True

    def download(self, ud, d):
        """Fetch url"""

        # A current clone is preferred to either tarball, a shallow tarball is
        # preferred to an out of date clone, and a missing clone will use
        # either tarball.
        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
            ud.localpath = ud.fullshallow
            return
        elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir):
            bb.utils.mkdirhier(ud.clonedir)
            runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)

        repourl = self._get_repo_url(ud)

        # If the repo still doesn't exist, fall back to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(clone_cmd, d, log=progresshandler)

        # Update the checkout if needed
        if self.clonedir_need_update(ud, d):
            output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
            if "origin" in output:
                runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
            fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
            runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-refs --all" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise

        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))

        if ud.shallow and ud.write_shallow_tarballs:
            missing_rev = self.clonedir_need_shallow_revs(ud, d)
            if missing_rev:
                raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)

    def build_mirror_data(self, ud, d):
        if ud.shallow and ud.write_shallow_tarballs:
            if not os.path.exists(ud.fullshallow):
                if os.path.islink(ud.fullshallow):
                    os.unlink(ud.fullshallow)
                tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
                shallowclone = os.path.join(tempdir, 'git')
                try:
                    self.clone_shallow_local(ud, shallowclone, d)

                    logger.info("Creating tarball of git repository")
                    runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
                    runfetchcmd("touch %s.done" % ud.fullshallow, d)
                finally:
                    bb.utils.remove(tempdir, recurse=True)
        elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
            runfetchcmd("touch %s.done" % ud.fullmirror, d)

    def clone_shallow_local(self, ud, dest, d):
        """Clone the repo and make it shallow.

        The upstream url of the new clone isn't set at this time, as it'll be
        set correctly when unpacked."""
        runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d)

        to_parse, shallow_branches = [], []
        for name in ud.names:
            revision = ud.revisions[name]
            depth = ud.shallow_depths[name]
            if depth:
                to_parse.append('%s~%d^{}' % (revision, depth - 1))

            # For nobranch, we need a ref, otherwise the commits will be
            # removed, and for non-nobranch, we truncate the branch to our
            # srcrev, to avoid keeping unnecessary history beyond that.
            branch = ud.branches[name]
            if ud.nobranch:
                ref = "refs/shallow/%s" % name
            elif ud.bareclone:
                ref = "refs/heads/%s" % branch
            else:
                ref = "refs/remotes/origin/%s" % branch

            shallow_branches.append(ref)
            runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)

        # Map srcrev+depths to revisions
        parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest)

        # Resolve specified revisions
        parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest)
        shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines()

        # Apply extra ref wildcards
        all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd,
                               d, workdir=dest).splitlines()
        for r in ud.shallow_extra_refs:
            if not ud.bareclone:
                r = r.replace('refs/heads/', 'refs/remotes/origin/')

            if '*' in r:
                matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
                shallow_branches.extend(matches)
            else:
                shallow_branches.append(r)

        # Make the repository shallow
        shallow_cmd = [self.make_shallow_path, '-s']
        for b in shallow_branches:
            shallow_cmd.append('-r')
            shallow_cmd.append(b)
        shallow_cmd.extend(shallow_revisions)
        runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
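
    # For illustration only (single name, default depth 1, branch "master",
    # non-bare clone): the command assembled above is roughly
    #   <bitbake_dir>/bin/git-make-shallow -s -r refs/remotes/origin/master <rev>
    # where <rev> is the commit resolved from "<srcrev>~0^{}".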

    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % subdir
            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        need_lfs = ud.parm.get("lfs", "1") == "1"

        source_found = False
        source_error = []

        if not source_found:
            clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
            if clonedir_is_up_to_date:
                runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
                source_found = True
            else:
                source_error.append("clone directory not available or not up to date: " + ud.clonedir)

        if not source_found:
            if ud.shallow:
                if os.path.exists(ud.fullshallow):
                    bb.utils.mkdirhier(destdir)
                    runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
                    source_found = True
                else:
                    source_error.append("shallow clone not available: " + ud.fullshallow)
            else:
                source_error.append("shallow clone not enabled")

        if not source_found:
            raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)

        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)

        if self._contains_lfs(ud, d, destdir):
            if need_lfs and not self._find_git_lfs(d):
                raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
            else:
                bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))

        if not ud.nocheckout:
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
                            workdir=destdir)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d, workdir=destdir)
                runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
                            branchname), d, workdir=destdir)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)

        return True
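
    # Example of the destination layout above (illustrative placeholders): by
    # default the tree is checked out into "<destdir>/git/"; with the SRC_URI
    # options subpath=docs;destsuffix=src/ only the "docs" subtree is read into
    # the index and checked out under "<destdir>/src/".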

    def clean(self, ud, d):
        """ clean the git directory """

        to_remove = [ud.localpath, ud.fullmirror, ud.fullmirror + ".done"]
        # The localpath is a symlink to clonedir when it is cloned from a
        # mirror, so remove both of them.
        if os.path.islink(ud.localpath):
            clonedir = os.path.realpath(ud.localpath)
            to_remove.append(clonedir)

        for r in to_remove:
            if os.path.exists(r):
                bb.note('Removing %s' % r)
                bb.utils.remove(r, True)

    def supports_srcrev(self):
        return True

    def _contains_ref(self, ud, d, name, wd):
        cmd = ""
        if ud.nobranch:
            cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revisions[name])
        else:
            cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revisions[name], ud.branches[name])
        try:
            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
        except bb.fetch2.FetchError:
            return False
        if len(output.split()) > 1:
            raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
        return output.split()[0] != "0"

    def _contains_lfs(self, ud, d, wd):
        """
        Check if the repository has 'lfs' (large file) content
        """
        cmd = "%s grep lfs HEAD:.gitattributes | wc -l" % (
            ud.basecmd)
        try:
            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
            if int(output) > 0:
                return True
        except (bb.fetch2.FetchError, ValueError):
            pass
        return False

    def _find_git_lfs(self, d):
        """
        Return True if git-lfs can be found, False otherwise.
        """
        import shutil
        return shutil.which("git-lfs", path=d.getVar('PATH')) is not None

    def _get_repo_url(self, ud):
        """
        Return the repository URL
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""
        return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
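
    # e.g. (illustrative values): proto "ssh", user "git", host "some.host"
    # and path "/somepath" yield "ssh://git@some.host/somepath".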

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]

    def _lsremote(self, ud, d, search):
        """
        Run git ls-remote with the specified search string
        """
        # Prevent recursion e.g. in OE if SRCPV is in PV, PV is in WORKDIR,
        # and WORKDIR is in PATH (as a result of RSS), our call to
        # runfetchcmd() exports PATH so this function will get called again (!)
        # In this scenario the return call of the function isn't actually
        # important - WORKDIR isn't needed in PATH to call git ls-remote
        # anyway.
        if d.getVar('_BB_GIT_IN_LSREMOTE', False):
            return ''
        d.setVar('_BB_GIT_IN_LSREMOTE', '1')
        try:
            repourl = self._get_repo_url(ud)
            cmd = "%s ls-remote %s %s" % \
                (ud.basecmd, repourl, search)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, cmd, repourl)
            output = runfetchcmd(cmd, d, True)
            if not output:
                raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
        finally:
            d.delVar('_BB_GIT_IN_LSREMOTE')
        return output

    def _latest_revision(self, ud, d, name):
        """
        Compute the HEAD revision for the url
        """
        output = self._lsremote(ud, d, "")
        # Tags of the form ^{} may not work, need to fall back to the other form
        if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
            head = ud.unresolvedrev[name]
            tag = ud.unresolvedrev[name]
        else:
            head = "refs/heads/%s" % ud.unresolvedrev[name]
            tag = "refs/tags/%s" % ud.unresolvedrev[name]
        for s in [head, tag + "^{}", tag]:
            for l in output.strip().split('\n'):
                sha1, ref = l.split()
                if s == ref:
                    return sha1
        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
            (ud.unresolvedrev[name], ud.host + ud.path))
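
    # Sketch of the resolution above (made-up ls-remote output): for
    # unresolvedrev "1.0" the candidate refs are tried in the order
    # refs/heads/1.0, refs/tags/1.0^{}, refs/tags/1.0, so an annotated tag
    # resolves to the commit it points at (the peeled ^{} entry) rather than
    # the tag object itself.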

    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match.
        """
        pupver = ('', '')

        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*")
        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
            bb.note("Could not list remote: %s" % str(e))
            return pupver

        verstring = ""
        revision = ""
        for line in output.split("\n"):
            if not line:
                break

            tag_head = line.split("/")[-1]
            # Ignore non-released branches
            m = re.search(r"(alpha|beta|rc|final)+", tag_head)
            if m:
                continue

            # search for version in the line
            tag = tagregex.search(tag_head)
            if tag is None:
                continue

            tag = tag.group('pver')
            tag = tag.replace("_", ".")

            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
                continue

            verstring = tag
            revision = line.split()[0]
            pupver = (verstring, revision)

        return pupver
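
    # Illustration of the default tag matching above (sample tag names, not
    # real data): with the default regex, tags "1.2.3" and "v1_2_4" yield
    # pver values "1.2.3" and "1.2.4" (after the "_" -> "." replacement),
    # while "2.0-rc1" is skipped by the alpha/beta/rc/final filter.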

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def gitpkgv_revision(self, ud, d, name):
        """
        Return a sortable revision number by counting commits in the history
        Based on gitpkgv.bbclass in meta-openembedded
        """
        rev = self._build_revision(ud, d, name)
        localpath = ud.localpath
        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
        if not os.path.exists(localpath):
            commits = None
        else:
            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
                from pipes import quote
                commits = bb.fetch2.runfetchcmd(
                        "git rev-list %s -- | wc -l" % quote(rev),
                        d, quiet=True).strip().lstrip('0')
                if commits:
                    open(rev_file, "w").write("%d\n" % int(commits))
            else:
                commits = open(rev_file, "r").readline(128).strip()
        if commits:
            return False, "%s+%s" % (commits, rev[:7])
        else:
            return True, str(rev)
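
    # Example return value (hypothetical numbers): with 1234 commits reachable
    # from revision "deadbeef..." this returns (False, "1234+deadbee"), a
    # count-based string that sorts naturally as history grows.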

    def checkstatus(self, fetch, ud, d):
        try:
            self._lsremote(ud, d, "")
            return True
        except bb.fetch2.FetchError:
            return False