# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.
"""

# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2012 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os, re
import signal
import logging
import urllib.request, urllib.parse, urllib.error
if 'git' not in urllib.parse.uses_netloc:
    urllib.parse.uses_netloc.append('git')
import operator
import collections
import subprocess
import pickle
import errno

import bb.persist_data, bb.utils
import bb.checksum
import bb.process
import bb.event

__version__ = "2"
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

class BBFetchException(Exception):
    """Class all fetch exceptions inherit from"""
    def __init__(self, message):
        self.msg = message
        Exception.__init__(self, message)

    def __str__(self):
        return self.msg

class UntrustedUrl(BBFetchException):
    """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
    def __init__(self, url, message=''):
        if message:
            msg = message
        else:
            msg = "The URL: '%s' is not trusted and cannot be used" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
    def __init__(self, url, message=''):
        if message:
            msg = message
        else:
            msg = "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly"""
    def __init__(self, message, url = None):
        if url:
            msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
        else:
            msg = "Fetcher failure: %s" % message
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""
    def __init__(self, message, url = None, checksum = None):
        self.checksum = checksum
        FetchError.__init__(self, message, url)

class NoChecksumError(FetchError):
    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""

class UnpackError(BBFetchException):
    """General fetcher exception when something happens incorrectly when unpacking"""
    def __init__(self, message, url):
        msg = "Unpack failure for URL: '%s'. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class NoMethodError(BBFetchException):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""
    def __init__(self, url):
        msg = "Could not find a fetcher which supports the URL: '%s'" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class MissingParameterError(BBFetchException):
    """Exception raised when a fetch method is missing a critical parameter in the url"""
    def __init__(self, missing, url):
        msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
        self.url = url
        self.missing = missing
        BBFetchException.__init__(self, msg)
        self.args = (missing, url)

class ParameterError(BBFetchException):
    """Exception raised when a url cannot be processed due to invalid parameters."""
    def __init__(self, message, url):
        msg = "URL: '%s' has invalid parameters. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class NetworkAccess(BBFetchException):
    """Exception raised when network access is disabled but it is required."""
    def __init__(self, url, cmd):
        msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
        self.url = url
        self.cmd = cmd
        BBFetchException.__init__(self, msg)
        self.args = (url, cmd)

class NonLocalMethod(Exception):
    def __init__(self):
        Exception.__init__(self)

class MissingChecksumEvent(bb.event.Event):
    def __init__(self, url, md5sum, sha256sum):
        self.url = url
        self.checksums = {'md5sum': md5sum,
                          'sha256sum': sha256sum}
        bb.event.Event.__init__(self)

class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

     * scheme (read/write)
     * userinfo (authentication information) (read/write)
       * username (read/write)
       * password (read/write)

       Note, password is deprecated as of RFC 3986.

     * hostname (read/write)
     * port (read/write)
     * hostport (read only)
       "hostname:port", if both are set, otherwise just "hostname"
     * path (read/write)
     * path_quoted (read/write)
       A URI quoted version of path
     * params (dict) (read/write)
     * query (dict) (read/write)
     * relative (bool) (read only)
       True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not conform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way conforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)
     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    "whitelisted" schemes (currently only file://), that requires
    its URIs to not have a network location.
    """

    _relative_schemes = ['file', 'git']
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.query = {}
        self.relative = False

        if not uri:
            return

        # We hijack the URL parameters, since the way bitbake uses
        # them are not quite RFC compliant.
        uri, param_str = (uri.split(";", 1) + [None])[:2]

        urlp = urllib.parse.urlparse(uri)
        self.scheme = urlp.scheme

        reparse = 0

        # Coerce urlparse to make URI scheme use netloc
        if not self.scheme in urllib.parse.uses_netloc:
            urllib.parse.uses_params.append(self.scheme)
            reparse = 1

        # Make urlparse happy(/ier) by converting local resources
        # to RFC compliant URL format. E.g.:
        #   file://foo.diff -> file:foo.diff
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
            reparse = 1

        if reparse:
            urlp = urllib.parse.urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
                re.compile(r"^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        self.path = urllib.parse.unquote(urlp.path)

        if param_str:
            self.params = self._param_str_split(param_str, ";")
        if urlp.query:
            self.query = self._param_str_split(urlp.query, "&")

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._query_str(),
            self._param_str())

    def _param_str(self):
        return (
            ''.join([';', self._param_str_join(self.params, ";")])
            if self.params else '')

    def _query_str(self):
        return (
            ''.join(['?', self._param_str_join(self.query, "&")])
            if self.query else '')

    def _param_str_split(self, string, elmdelim, kvdelim="="):
        ret = collections.OrderedDict()
        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
            ret[k] = v
        return ret

    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.parse.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.parse.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        if not path or re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        password = self.password
        self.userinfo = username
        if password:
            self.userinfo += ":%s" % password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
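
# A quick illustration of the parsing rules above (hypothetical host; the
# values shown are what the class should produce, not authoritative output):
#
#   uri = URI("git://git.example.com/repo.git;protocol=https")
#   uri.scheme   -> 'git'
#   uri.hostname -> 'git.example.com'
#   uri.path     -> '/repo.git'
#   uri.params   -> {'protocol': 'https'}
#   str(uri)     -> 'git://git.example.com/repo.git;protocol=https'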

def decodeurl(url):
    """Decodes a URL into the tokens (scheme, network location, path,
    user, password, parameters).
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    elif type.lower() == 'file':
        host = ""
        path = location
    else:
        host = location
        path = "/"
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = collections.OrderedDict()
    if parm:
        for s in parm.split(';'):
            if s:
                if not '=' in s:
                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
                s1, s2 = s.split('=')
                p[s1] = s2

    return type, host, urllib.parse.unquote(path), user, pswd, p
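
# Illustrative example (hypothetical url): decoding
#   "git://git.example.com/repo.git;protocol=https;branch=main"
# should yield the tuple
#   ('git', 'git.example.com', '/repo.git', '', '',
#    OrderedDict([('protocol', 'https'), ('branch', 'main')]))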

def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """

    type, host, path, user, pswd, p = decoded

    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
    url = '%s://' % type
    if user and type != "file":
        url += "%s" % user
        if pswd:
            url += ":%s" % pswd
        url += "@"
    if host and type != "file":
        url += "%s" % host
    if path:
        # Standardise path to ensure comparisons work
        while '//' in path:
            path = path.replace("//", "/")
        url += "%s" % urllib.parse.quote(path)
    if p:
        for parm in p:
            url += ";%s=%s" % (parm, p[parm])

    return url
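
# Illustrative round trip: encodeurl() also normalises the path, collapsing
# any '//' runs, so (hypothetical values)
#   encodeurl(('http', 'example.com', '/a//b.tar.gz', '', '', {}))
# should give 'http://example.com/a/b.tar.gz'.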

def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    result_decoded = ['', '', '', '', '', {}]
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_find_decoded[loc]:
                    if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result
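
# Illustrative example (hypothetical hosts): given a mirror mapping of
#   uri_find    = "http://.*/.*"
#   uri_replace = "http://mirror.example.com/sources/BASENAME"
# a url of "http://upstream.example.org/pkg/foo-1.0.tar.gz" should be
# rewritten to "http://mirror.example.com/sources/foo-1.0.tar.gz", BASENAME
# coming from the replacements dict built in build_mirroruris() below.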

methods = []
urldata_cache = {}
saved_headrevs = {}

def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """

    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        try:
            bb.fetch2.saved_headrevs = revs.items()
        except:
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

def fetcher_parse_save():
    _checksum_cache.save_extras()

def fetcher_parse_done():
    _checksum_cache.save_merge()

def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with current values and
    return true/false on whether they've changed.
    """

    data = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
    data2 = bb.fetch2.saved_headrevs

    changed = False
    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            changed = True
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False

def mirror_from_string(data):
    mirrors = (data or "").replace('\\n',' ').split()
    # Split into pairs
    if len(mirrors) % 2 != 0:
        bb.warn('Invalid mirror data %s, should have paired members.' % data)
    return list(zip(*[iter(mirrors)]*2))
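
# Illustrative example: a MIRRORS/PREMIRRORS style value such as
#   "git://.*/.* http://mirror.example.com/sources/ \\n"
# splits into [('git://.*/.*', 'http://mirror.example.com/sources/')],
# i.e. the (find, replace) pairs consumed by uri_replace() above.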

def verify_checksum(ud, d, precomputed={}):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    Returns a dict of checksums that can be stored in a done stamp file and
    passed in as precomputed parameter in a later call to avoid re-computing
    the checksums from the file. This allows verifying the checksums of the
    file against those in the recipe each time, rather than only after
    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
    """

    _MD5_KEY = "md5"
    _SHA256_KEY = "sha256"

    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
        return {}

    if _MD5_KEY in precomputed:
        md5data = precomputed[_MD5_KEY]
    else:
        md5data = bb.utils.md5_file(ud.localpath)

    if _SHA256_KEY in precomputed:
        sha256data = precomputed[_SHA256_KEY]
    else:
        sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
        if strict == "1":
            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                         'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                         (ud.localpath, ud.md5_name, md5data,
                          ud.sha256_name, sha256data))
            raise NoChecksumError('Missing SRC_URI checksum', ud.url)

        bb.event.fire(MissingChecksumEvent(ud.url, md5data, sha256data), d)

        if strict == "ignore":
            return {
                _MD5_KEY: md5data,
                _SHA256_KEY: sha256data
            }

        # Log missing sums so user can more easily add them
        logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                       'SRC_URI[%s] = "%s"',
                       ud.localpath, ud.md5_name, md5data)
        logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                       'SRC_URI[%s] = "%s"',
                       ud.localpath, ud.sha256_name, sha256data)

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    msg = ""
    mismatch = False
    if ud.md5_expected and ud.md5_expected != md5data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
        mismatch = True

    if ud.sha256_expected and ud.sha256_expected != sha256data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
        mismatch = True

    if mismatch:
        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)

    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)

    return {
        _MD5_KEY: md5data,
        _SHA256_KEY: sha256data
    }
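
# The expected values verified above come from SRC_URI varflags in the
# recipe, e.g. (hypothetical digests):
#   SRC_URI[md5sum] = "5d41402abc4b2a76b9719d911017c592"
#   SRC_URI[sha256sum] = "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"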

def verify_donestamp(ud, d, origud=None):
    """
    Check whether the done stamp file has the right checksums (if the fetch
    method supports them). If it doesn't, delete the done stamp and force
    a re-download.

    Returns True, if the donestamp exists and is valid, False otherwise. When
    returning False, any existing done stamps are removed.
    """
    if not ud.needdonestamp or (origud and not origud.needdonestamp):
        return True

    if not os.path.exists(ud.localpath):
        # local path does not exist
        if os.path.exists(ud.donestamp):
            # done stamp exists, but the downloaded file does not; the done stamp
            # must be incorrect, re-trigger the download
            bb.utils.remove(ud.donestamp)
        return False

    if (not ud.method.supports_checksum(ud) or
        (origud and not origud.method.supports_checksum(origud))):
        # if done stamp exists and checksums not supported; assume the local
        # file is current
        return os.path.exists(ud.donestamp)

    precomputed_checksums = {}
    # Only re-use the precomputed checksums if the donestamp is newer than the
    # file. Do not rely on the mtime of directories, though. If ud.localpath is
    # a directory, there will probably not be any checksums anyway.
    if os.path.exists(ud.donestamp) and (os.path.isdir(ud.localpath) or
            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
        try:
            with open(ud.donestamp, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                precomputed_checksums.update(pickled.load())
        except Exception as e:
            # Avoid the warnings on the upgrade path from empty done stamp
            # files to those containing the checksums.
            if not isinstance(e, EOFError):
                # Ignore errors, they aren't fatal
                logger.warning("Couldn't load checksums from donestamp %s: %s "
                               "(msg: %s)" % (ud.donestamp, type(e).__name__,
                                              str(e)))

    try:
        checksums = verify_checksum(ud, d, precomputed_checksums)
        # If the cache file did not have the checksums, compute and store them
        # as an upgrade path from the previous done stamp file format.
        if checksums != precomputed_checksums:
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        return True
    except ChecksumError as e:
        # Checksums failed to verify, trigger re-download and remove the
        # incorrect stamp file.
        logger.warning("Checksum mismatch for local file %s\n"
                       "Cleaning and trying again." % ud.localpath)
        if os.path.exists(ud.localpath):
            rename_bad_checksum(ud, e.checksum)
        bb.utils.remove(ud.donestamp)
    return False

def update_stamp(ud, d):
    """
    The donestamp is a stamp file indicating that the whole fetch is done;
    this function updates the stamp after verifying the checksum.
    """
    if not ud.needdonestamp:
        return

    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except:
            # Errors aren't fatal here
            pass
    else:
        try:
            checksums = verify_checksum(ud, d)
            # Store the checksums for later re-verification against the recipe
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        except ChecksumError as e:
            # Checksums failed to verify, trigger re-download and remove the
            # incorrect stamp file.
            logger.warning("Checksum mismatch for local file %s\n"
                           "Cleaning and trying again." % ud.localpath)
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
            bb.utils.remove(ud.donestamp)
            raise

def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)

def get_autorev(d):
    # Only avoid caching the source revision in the autorev case
    if d.getVar('BB_SRCREV_POLICY') != "cache":
        d.setVar('BB_DONT_CACHE', '1')
    return "AUTOINC"

def get_srcrev(d, method_name='sortable_revision'):
    """
    Return the revision string, usually for use in the version string (PV) of the current package
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.

    The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
    incremental, other code is then responsible for turning that into an increasing value (if needed)

    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
    that fetcher provides a method with the given name and the same signature as sortable_revision.
    """

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT')
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
                         "The SCMs are:\n%s" % '\n'.join(scms))

    name_to_rev = {}
    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            name_to_rev[name] = rev
    # Replace names by revisions in the SRCREV_FORMAT string. The approach used
    # here can handle names being prefixes of other names and names appearing
    # as substrings in revisions (in which case the name should not be
    # expanded). The '|' regular expression operator tries matches from left to
    # right, so we need to sort the names with the longest ones first.
    names_descending_len = sorted(name_to_rev, key=len, reverse=True)
    name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
    format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)

    if seenautoinc:
        format = "AUTOINC+" + format

    return format
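
# Illustrative example (hypothetical names): with two named SCMs in SRC_URI
# (";name=machine" and ";name=meta") and SRCREV_FORMAT = "machine_meta", the
# substitution above should produce something like
#   "b2e9c8e110_f3d1a6c9b2"
# prefixed with "AUTOINC+" if any revision was not incremental.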

def localpath(url, d):
    fetcher = bb.fetch2.Fetch([url], d)
    return fetcher.localpath(url)

def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['HOME', 'PATH',
                  'HTTP_PROXY', 'http_proxy',
                  'HTTPS_PROXY', 'https_proxy',
                  'FTP_PROXY', 'ftp_proxy',
                  'FTPS_PROXY', 'ftps_proxy',
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'GIT_SSH',
                  'GIT_SSL_CAINFO',
                  'GIT_SMART_HTTP',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD',
                  'DBUS_SESSION_BUS_ADDRESS',
                  'P4CONFIG']

    if not cleanup:
        cleanup = []

    # If PATH contains WORKDIR which contains PV-PR which contains SRCPV we
    # can end up in circular recursion here so give the option of breaking it
    # in a data store copy.
    try:
        d.getVar("PV")
        d.getVar("PR")
    except bb.data_smart.ExpansionError:
        d = bb.data.createCopy(d)
        d.setVar("PV", "fetcheravoidrecurse")
        d.setVar("PR", "fetcheravoidrecurse")

    origenv = d.getVar("BB_ORIGENV", False)
    for var in exportvars:
        val = d.getVar(var) or (origenv and origenv.getVar(var))
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    # Ensure that a _PYTHON_SYSCONFIGDATA_NAME value set by a recipe
    # (for example via python3native.bbclass since warrior) is not set for
    # host Python (otherwise tools like git-make-shallow will fail)
    cmd = 'unset _PYTHON_SYSCONFIGDATA_NAME; ' + cmd

    # Disable pseudo as it may affect ssh, potentially causing it to hang.
    cmd = 'export PSEUDO_DISABLED=1; ' + cmd

    if workdir:
        logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
    else:
        logger.debug(1, "Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output

def check_network_access(d, info, url):
    """
    log remote network access, and error if BB_NO_NETWORK is set or the given
    URI is untrusted
    """
    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
        raise NetworkAccess(url, info)
    elif not trusted_network(d, url):
        raise UntrustedUrl(url, info)
    else:
        logger.debug(1, "Fetcher accessed the network with the command %s" % info)

def build_mirroruris(origud, mirrors, ld):
    uris = []
    uds = []

    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(ud, uris, uds, mirrors, tarballs):
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                continue

            for tarball in tarballs:
                newuri = uri_replace(ud, find, replace, replacements, ld, tarball)
                if not newuri or newuri in uris or newuri == origud.url:
                    continue

                if not trusted_network(ld, newuri):
                    logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
                    continue

                # Create a local copy of the mirrors minus the current line
                # this will prevent us from recursively processing the same line
                # as well as indirect recursion A -> B -> C -> A
                localmirrors = list(mirrors)
                localmirrors.remove(line)

                try:
                    newud = FetchData(newuri, ld)
                    newud.setup_localpath(ld)
                except bb.fetch2.BBFetchException as e:
                    logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                    logger.debug(1, str(e))
                    try:
                        # setup_localpath of file:// urls may fail, we should still see
                        # if mirrors of the url exist
                        adduri(newud, uris, uds, localmirrors, tarballs)
                    except UnboundLocalError:
                        pass
                    continue
                uris.append(newuri)
                uds.append(newud)

                adduri(newud, uris, uds, localmirrors, tarballs)

    adduri(origud, uris, uds, mirrors, origud.mirrortarballs or [None])

    return uris, uds

def rename_bad_checksum(ud, suffix):
    """
    Renames files to have suffix from parameter
    """
    if ud.localpath is None:
        return

    new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
    bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
    bb.utils.movefile(ud.localpath, new_localpath)

def try_mirror_url(fetch, origud, ud, ld, check = False):
    # Return of None or a value means we're finished
    # False means try another url

    if ud.lockfile and ud.lockfile != origud.lockfile:
        lf = bb.utils.lockfile(ud.lockfile)

    try:
        if check:
            found = ud.method.checkstatus(fetch, ud, ld)
            if found:
                return found
            return False

        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR")

        if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            # Create donestamp in old format to avoid triggering a re-download
            if ud.donestamp:
                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
                open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                # In case this is executing without any file locks held (as is
                # the case for file:// URLs), two tasks may end up here at the
                # same time, in which case we do not want the second task to
                # fail when the link has already been created by the first task.
                try:
                    os.symlink(ud.localpath, dest)
                except FileExistsError:
                    pass
            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                origud.method.download(origud, ld)
                if hasattr(origud.method, "build_mirror_data"):
                    origud.method.build_mirror_data(origud, ld)
            return origud.localpath
        # Otherwise the result is a local file:// and we symlink to it
        ensure_symlink(ud.localpath, origud.localpath)
        update_stamp(origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        raise

    except IOError as e:
        if e.errno in [errno.ESTALE]:
            logger.warning("Stale Error Observed %s." % ud.url)
            return False
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
            logger.warning(str(e))
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
            logger.debug(1, str(e))
        try:
            ud.method.clean(ud, ld)
        except UnboundLocalError:
            pass
        return False
    finally:
        if ud.lockfile and ud.lockfile != origud.lockfile:
            bb.utils.unlockfile(lf)

def ensure_symlink(target, link_name):
    if not os.path.exists(link_name):
        if os.path.islink(link_name):
            # Broken symbolic link
            os.unlink(link_name)

        # In case this is executing without any file locks held (as is
        # the case for file:// URLs), two tasks may end up here at the
        # same time, in which case we do not want the second task to
        # fail when the link has already been created by the first task.
        try:
            os.symlink(target, link_name)
        except FileExistsError:
            pass

def try_mirrors(fetch, d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d is a bb.data instance
    uri is the original uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    ld = d.createCopy()

    uris, uds = build_mirroruris(origud, mirrors, ld)

    for index, uri in enumerate(uris):
        ret = try_mirror_url(fetch, origud, uds[index], ld, check)
        if ret != False:
            return ret
    return None

def trusted_network(d, url):
    """
    Use a trusted url during download if networking is enabled and
    BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
    Note: modifies SRC_URI & mirrors.
    """
    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
        return True

    pkgname = d.expand(d.getVar('PN', False))
    trusted_hosts = None
    if pkgname:
        trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)

    if not trusted_hosts:
        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')

    # Not enabled.
    if not trusted_hosts:
        return True

    scheme, network, path, user, passwd, param = decodeurl(url)

    if not network:
        return True

    network = network.split(':')[0]
    network = network.lower()

    for host in trusted_hosts.split(" "):
        host = host.lower()
        if host.startswith("*.") and ("." + network).endswith(host[1:]):
            return True
        if host == network:
            return True

    return False
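
# Illustrative example (hypothetical hosts): with
#   BB_ALLOWED_NETWORKS = "*.example.com git.other.org"
# "downloads.example.com" matches the "*.example.com" wildcard and
# "git.other.org" matches exactly; any other host is rejected.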

def srcrev_internal_helper(ud, d, name):
    """
    Return:
        a) a source revision if specified
        b) latest revision if SRCREV="AUTOINC"
        c) None if not specified
    """

    srcrev = None
    pn = d.getVar("PN")
    attempts = []
    if name != '' and pn:
        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
    if name != '':
        attempts.append("SRCREV_%s" % name)
    if pn:
        attempts.append("SRCREV_pn-%s" % pn)
    attempts.append("SRCREV")

    for a in attempts:
        srcrev = d.getVar(a)
        if srcrev and srcrev != "INVALID":
            break

    if 'rev' in ud.parm and 'tag' in ud.parm:
        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))

    if 'rev' in ud.parm or 'tag' in ud.parm:
        if 'rev' in ud.parm:
            parmrev = ud.parm['rev']
        else:
            parmrev = ud.parm['tag']
        if srcrev == "INVALID" or not srcrev:
            return parmrev
        if srcrev != parmrev:
            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
        return parmrev

    if srcrev == "INVALID" or not srcrev:
        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (ud.url, str(attempts)), ud.url)

    if srcrev == "AUTOINC":
        srcrev = ud.method.latest_revision(ud, d, name)

    return srcrev

def get_checksum_file_list(d):
    """ Get a list of files to checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string
    """
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR')
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        if ud and isinstance(ud.method, local.Local):
            paths = ud.method.localpaths(ud, d)
            for f in paths:
                pth = ud.decodedurl
                if '*' in pth:
                    f = os.path.join(os.path.abspath(f), pth)
                if f.startswith(dl_dir):
                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                    if os.path.exists(f):
                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                    else:
                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                filelist.append(f + ":" + str(os.path.exists(f)))

    return " ".join(filelist)
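
# Illustrative result (hypothetical paths): each entry is "<path>:<exists>",
# e.g. "/srv/files/defconfig:True /srv/files/0001-fix.patch:True"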

def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

    Returns the checksums for a list of local files, caching the results as
    it proceeds
    """
    return _checksum_cache.get_checksums(filelist, pn)

class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.needdonestamp = True
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarballs = []
        self.basename = None
        self.basepath = None
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
        self.date = self.getSRCDate(d)
        self.url = url
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        if self.md5_name in self.parm:
            self.md5_expected = self.parm[self.md5_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
            self.md5_expected = None
        else:
            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
        if self.sha256_name in self.parm:
            self.sha256_expected = self.parm[self.sha256_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
            self.sha256_expected = None
        else:
            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
        self.ignore_checksums = False

        self.names = self.parm.get("name",'default').split(',')

        self.method = None
        for m in methods:
            if m.supports(self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
            self.parm["protocol"] = self.parm.get("proto", None)

        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self, d)

        dldir = d.getVar("DL_DIR")

        if not self.needdonestamp:
            return

        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        elif self.basepath or self.basename:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        else:
            bb.fatal("Can't determine lock path for url %s" % url)

        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

    def setup_revisions(self, d):
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)

        # add compatibility code for non name specified case
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]

    def setup_localpath(self, d):
        if not self.localpath:
            self.localpath = self.method.localpath(self, d)

    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN")

        if pn:
            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")

        return d.getVar("SRCDATE") or d.getVar("DATE")

class FetchMethod(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls=None):
        self.urls = []

    def supports(self, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also set up variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)

    def supports_checksum(self, urldata):
        """
        Is localpath something that can be represented by a checksum?
        """

        # We cannot compute checksums for directories
        if os.path.isdir(urldata.localpath):
            return False
        if urldata.localpath.find("*") != -1:
            return False

        return True

    def recommends_checksum(self, urldata):
        """
        Is the backend one where checksumming is recommended (should warnings
        be displayed if there is no checksum)?
        """
        return False

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def need_update(self, ud, d):
        """
        Force a fetch, even if localpath exists?
        """
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def download(self, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError(urldata.url)
  1172. def unpack(self, urldata, rootdir, data):
  1173. iterate = False
  1174. file = urldata.localpath
  1175. # Localpath can't deal with 'dir/*' entries, so it converts them to '.',
  1176. # but it must be corrected back for local files copying
  1177. if urldata.basename == '*' and file.endswith('/.'):
  1178. file = '%s/%s' % (file.rstrip('/.'), urldata.path)
  1179. try:
  1180. unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
  1181. except ValueError as exc:
  1182. bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
  1183. (file, urldata.parm.get('unpack')))
        base, ext = os.path.splitext(file)
        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
            efile = os.path.join(rootdir, os.path.basename(base))
        else:
            efile = file
        cmd = None

        if unpack:
            if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.txz') or file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.lz'):
                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.lz'):
                cmd = 'lzip -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.7z'):
                cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.7z'):
                cmd = '7za x -y %s 1>/dev/null' % file
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                output = subprocess.check_output(['ar', '-t', file], preexec_fn=subprocess_setup)
                datafile = None
                if output:
                    for line in output.decode().splitlines():
                        if line.startswith('data.tar.'):
                            datafile = line
                            break
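                    # Note: this 'else' belongs to the 'for' loop and runs only
                    # when the loop finishes without 'break', i.e. when no
                    # data.tar.* member was found in the archive listing.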
                    else:
                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
                else:
                    raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
                cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)

        # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
        if 'subdir' in urldata.parm:
            subdir = urldata.parm.get('subdir')
            if os.path.isabs(subdir):
                if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
                    raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
                unpackdir = subdir
            else:
                unpackdir = os.path.join(rootdir, subdir)
            bb.utils.mkdirhier(unpackdir)
        else:
            unpackdir = rootdir

        if not unpack or not cmd:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(unpackdir, os.path.basename(file))
            if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                destdir = '.'
                # For file:// entries all intermediate dirs in the path must be created at the destination
                if urldata.type == "file":
                    # A trailing '/' makes the copy land in the wrong place
                    urlpath = urldata.path.rstrip('/')
                    # We want files placed relative to cwd, so strip any leading '/'
                    urlpath = urlpath.lstrip('/')
                    if "/" in urlpath:
                        destdir = urlpath.rsplit("/", 1)[0] + '/'
                        bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
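                # cp flags: -f force, -p preserve mode/timestamps, -P copy
                # symlinks as symlinks, -R recurse, -H follow only symlinks
                # named on the command line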
                cmd = 'cp -fpPRH %s %s' % (file, destdir)

        if not cmd:
            return

        path = data.getVar('PATH')
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, unpackdir))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        if iterate:
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return

    def clean(self, urldata, d):
        """
        Clean any existing full or partial download
        """
        bb.utils.remove(urldata.localpath)

    def try_premirror(self, urldata, d):
        """
        Should premirrors be used?
        """
        return True

    def checkstatus(self, fetch, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
        return True

    def latest_revision(self, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)

        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(ud, d, name)
        try:
            return revs[key]
        except KeyError:
            revs[key] = rev = self._latest_revision(ud, d, name)
            return rev

    def sortable_revision(self, ud, d, name):
        latest_rev = self._build_revision(ud, d, name)
        return True, str(latest_rev)

    def generate_revision_key(self, ud, d, name):
        key = self._revision_key(ud, d, name)
        return "%s-%s" % (key, d.getVar("PN") or "")

    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match as a (version, revision) pair.
        """
        return ('', '')
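
# An illustrative sketch (not part of this module): a minimal FetchMethod
# subclass only needs supports() to claim a URL scheme and download() to
# fetch it; the base class above provides usable defaults for the rest.
# The 'example' scheme and the 'examplefetch' tool are hypothetical.
#
#   class Example(FetchMethod):
#       def supports(self, ud, d):
#           return ud.type in ['example']
#
#       def download(self, ud, d):
#           runfetchcmd("examplefetch %s -o %s" % (ud.url, ud.localpath), d)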

class Fetch(object):
    def __init__(self, urls, d, cache=True, localonly=False, connection_cache=None):
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at the same time")

        if len(urls) == 0:
            urls = d.getVar("SRC_URI").split()
        self.urls = urls
        self.d = d
        self.ud = {}
        self.connection_cache = connection_cache

        fn = d.getVar('FILE')
        mc = d.getVar('__BBMULTICONFIG') or ""
        if cache and fn and mc + fn in urldata_cache:
            self.ud = urldata_cache[mc + fn]

        for url in urls:
            if url not in self.ud:
                try:
                    self.ud[url] = FetchData(url, d, localonly)
                except NonLocalMethod:
                    if localonly:
                        self.ud[url] = None

        if fn and cache:
            urldata_cache[mc + fn] = self.ud

    def localpath(self, url):
        if url not in self.urls:
            self.ud[url] = FetchData(url, self.d)

        self.ud[url].setup_localpath(self.d)
        return self.d.expand(self.ud[url].localpath)

    def localpaths(self):
        """
        Return a list of the local filenames, assuming a successful fetch
        """
        local = []

        for u in self.urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            local.append(ud.localpath)

        return local

    def download(self, urls=None):
        """
        Fetch all urls
        """
        if not urls:
            urls = self.urls

        network = self.d.getVar("BB_NO_NETWORK")
        premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            localpath = ""

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            try:
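                # Overall strategy: if a valid donestamp already covers this
                # file, reuse it; otherwise try PREMIRRORS, then the upstream
                # URL, then MIRRORS, verifying checksums after each stage so
                # a bad copy from one source doesn't stop us trying the next.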
                self.d.setVar("BB_NO_NETWORK", network)

                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                    localpath = try_mirrors(self, self.d, ud, mirrors, False)
                    if localpath:
                        try:
                            # Verify the checksum early so that if the premirror
                            # contents mismatch, the fetcher can still try upstream and mirrors
                            update_stamp(ud, self.d)
                        except ChecksumError as e:
                            logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
                            logger.debug(1, str(e))
                            localpath = ""
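
                # With BB_FETCH_PREMIRRORONLY set, disable the network before
                # the upstream attempt so it fails fast instead of fetching
                # from the real upstream.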
                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")

                firsterr = None
                verified_stamp = verify_donestamp(ud, self.d)
                if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
                    try:
                        if not trusted_network(self.d, ud.url):
                            raise UntrustedUrl(ud.url)
                        logger.debug(1, "Trying Upstream")
                        m.download(ud, self.d)
                        if hasattr(m, "build_mirror_data"):
                            m.build_mirror_data(ud, self.d)
                        localpath = ud.localpath
                        # Verify the checksum early so that on a mismatch the
                        # fetcher still has a chance to fetch from a mirror
                        update_stamp(ud, self.d)
                    except bb.fetch2.NetworkAccess:
                        raise
                    except BBFetchException as e:
                        if isinstance(e, ChecksumError):
                            logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
                            logger.debug(1, str(e))
                            if os.path.exists(ud.localpath):
                                rename_bad_checksum(ud, e.checksum)
                        elif isinstance(e, NoChecksumError):
                            raise
                        else:
                            logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
                            logger.debug(1, str(e))
                        firsterr = e
                        # Remove any incomplete fetch
                        if not verified_stamp:
                            m.clean(ud, self.d)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                        localpath = try_mirrors(self, self.d, ud, mirrors)

                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                    if firsterr:
                        logger.error(str(firsterr))
                    raise FetchError("Unable to fetch URL from any source.", u)

                update_stamp(ud, self.d)

            except IOError as e:
                if e.errno in [errno.ESTALE]:
                    logger.error("Stale Error Observed %s." % u)
                    raise ChecksumError("Stale Error Detected")

            except BBFetchException as e:
                if isinstance(e, ChecksumError):
                    logger.error("Checksum failure fetching %s" % u)
                raise

            finally:
                if ud.lockfile:
                    bb.utils.unlockfile(lf)

    def checkstatus(self, urls=None):
        """
        Check all urls exist upstream
        """
        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
            ret = try_mirrors(self, self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                ret = m.checkstatus(self, ud, self.d)
                if not ret:
                    # Finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                    ret = try_mirrors(self, self.d, ud, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)

    def unpack(self, root, urls=None):
        """
        Unpack urls to root
        """
        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            try:
                ud.method.unpack(ud, root, self.d)
            finally:
                # Release the lock even if unpack raises
                if ud.lockfile:
                    bb.utils.unlockfile(lf)

    def clean(self, urls=None):
        """
        Clean files that the fetcher gets or places
        """
        if not urls:
            urls = self.urls

        for url in urls:
            if url not in self.ud:
                self.ud[url] = FetchData(url, self.d)
            ud = self.ud[url]
            ud.setup_localpath(self.d)

            if not ud.localfile and ud.localpath is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            try:
                ud.method.clean(ud, self.d)
                if ud.donestamp:
                    bb.utils.remove(ud.donestamp)
            finally:
                # Release the lock even if clean raises
                if ud.lockfile:
                    bb.utils.unlockfile(lf)

class FetchConnectionCache(object):
    """
    A class which represents a container for socket connections.
    """
    def __init__(self):
        self.cache = {}

    def get_connection_name(self, host, port):
        return host + ':' + str(port)

    def add_connection(self, host, port, connection):
        cn = self.get_connection_name(host, port)

        if cn not in self.cache:
            self.cache[cn] = connection

    def get_connection(self, host, port):
        connection = None

        cn = self.get_connection_name(host, port)
        if cn in self.cache:
            connection = self.cache[cn]

        return connection

    def remove_connection(self, host, port):
        cn = self.get_connection_name(host, port)
        if cn in self.cache:
            self.cache[cn].close()
            del self.cache[cn]

    def close_connections(self):
        for cn in list(self.cache.keys()):
            self.cache[cn].close()
            del self.cache[cn]
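
# A minimal usage sketch (hypothetical values; 'conn' stands for any object
# with a close() method, e.g. an open FTP/HTTP connection):
#
#   connection_cache = FetchConnectionCache()
#   connection_cache.add_connection('example.com', 80, conn)
#   reused = connection_cache.get_connection('example.com', 80)
#   connection_cache.close_connections()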

from . import cvs
from . import git
from . import gitsm
from . import gitannex
from . import local
from . import svn
from . import wget
from . import ssh
from . import sftp
from . import s3
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo
from . import clearcase
from . import npm
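
# Registration order matters: when resolving a URL, FetchData uses the first
# method in this list whose supports() accepts the URL.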
methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(gitsm.GitSM())
methods.append(gitannex.GitANNEX())
methods.append(cvs.Cvs())
methods.append(ssh.SSH())
methods.append(sftp.SFTP())
methods.append(s3.S3())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
methods.append(npm.Npm())