s3.py

# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementation for Amazon AWS S3.

Class for fetching files from Amazon S3 using the AWS Command Line
Interface. The aws tool must be correctly installed and configured
prior to use.

"""

# Copyright (C) 2017, Andre McCurdy <armccurdy@gmail.com>
#
# Based in part on bb.fetch2.wget:
#    Copyright (C) 2003, 2004 Chris Larson
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import bb
import urllib.request, urllib.parse, urllib.error
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import runfetchcmd

class S3(FetchMethod):
    """Class to fetch urls via 'aws s3'"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with s3.
        """
        return ud.type in ['s3']
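
    # Illustrative recipe usage of an s3:// URL as accepted by supports()
    # above (the bucket, path and filename are placeholder examples, not
    # taken from this file). Since recommends_checksum() below returns True,
    # a checksum entry for the fetched file is expected alongside the URL:
    #
    #   SRC_URI = "s3://example-bucket/downloads/example-1.0.tar.gz"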

    def recommends_checksum(self, urldata):
        return True

    def urldata_init(self, ud, d):
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))

        ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3"
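
    # Knobs handled above, with illustrative values (the profile name and
    # renamed file are placeholders, not taken from this file). Note that
    # extra global aws options in a FETCHCMD_s3 override must come before
    # the "s3" subcommand, since "cp"/"ls" are appended after basecmd:
    #
    #   SRC_URI = "s3://example-bucket/example.tar.gz;downloadfilename=example-1.0.tar.gz"
    #   FETCHCMD_s3 = "/usr/bin/env aws --profile example-profile s3"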

    def download(self, ud, d):
        """
        Fetch urls
        Assumes localpath was called first
        """

        cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)

        # Additional sanity checks copied from the wget class (although there
        # are no known issues which mean these are required, treat the aws cli
        # tool with a little healthy suspicion).

        if not os.path.exists(ud.localpath):
            raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % (ud.host, ud.path, ud.localpath))

        if os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and failing since this isn't right." % (ud.host, ud.path))

        return True
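
    # For a URL like s3://example-bucket/example-1.0.tar.gz, the cp command
    # built above takes roughly this shape (paths illustrative; the local
    # destination lives under BitBake's download directory):
    #
    #   /usr/bin/env aws s3 cp s3://example-bucket/example-1.0.tar.gz ${DL_DIR}/example-1.0.tar.gz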

    def checkstatus(self, fetch, ud, d):
        """
        Check the status of a URL
        """

        cmd = '%s ls s3://%s%s' % (ud.basecmd, ud.host, ud.path)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d)

        # "aws s3 ls s3://mybucket/foo" will exit with success even if the file
        # is not found, so check output of the command to confirm success.

        if not output:
            raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path))

        return True
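
    # When the object exists, "aws s3 ls" prints a listing line roughly like
    # the following (format illustrative) and prints nothing at all when it
    # doesn't, which is what the empty-output check above relies on:
    #
    #   2017-01-01 12:00:00    1234567 example-1.0.tar.gz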