download.py 6.6 KB

import os
import time
import hashlib

import pycurl
from threading import Thread


class Download(Thread):
    """Download a single URL to disk in a background thread using pycurl."""

    def __init__(self, url, path, cookies=False, useragent=False):
        super(Download, self).__init__()
        self.url = url
        self.path = path
        self.useragent = useragent
        self.cookies = cookies
        self.downloaded = 0
        self.progress = {'downloaded': 0, 'total': 0, 'percent': 0, 'stopped': False}
        self.stop = False
        self.filename = ""
        # per-instance state (kept out of the class body so it is not shared
        # between instances)
        self._dst_path = ""
        self._is_finished = False
        self._is_successful = False
        self._errors = []
        self._HashFunc = None
        self._HashValue = ""

    def isFinished(self):
        return self._is_finished

    def isSuccessful(self):
        return self._is_successful

    def get_dest(self):
        return self._dst_path

    def get_errors(self):
        return self._errors
    def run(self):
        # First pass: HEAD request (NOBODY) to follow redirects and learn the
        # effective URL / filename without downloading the body.
        c = pycurl.Curl()
        c.setopt(pycurl.URL, self.url)
        c.setopt(pycurl.FOLLOWLOCATION, 1)
        c.setopt(pycurl.MAXREDIRS, 5)
        c.setopt(pycurl.NOBODY, 1)
        c.setopt(pycurl.CONNECTTIMEOUT, 10)
        if self.useragent:
            c.setopt(pycurl.USERAGENT, self.useragent)
        # add cookies, if available
        if self.cookies:
            c.setopt(pycurl.COOKIE, self.cookies)
        c.perform()
        realurl = c.getinfo(pycurl.EFFECTIVE_URL)
        c.close()
        self.filename = realurl.split("/")[-1].strip()

        # Second pass: download the resolved URL, reporting progress through
        # the getProgress callback.
        c = pycurl.Curl()
        c.setopt(pycurl.CONNECTTIMEOUT, 10)
        c.setopt(pycurl.URL, realurl)
        c.setopt(pycurl.FOLLOWLOCATION, 0)
        c.setopt(pycurl.NOPROGRESS, False)
        c.setopt(pycurl.XFERINFOFUNCTION, self.getProgress)
        if self.useragent:
            c.setopt(pycurl.USERAGENT, self.useragent)
        # configure pycurl output file
        if not self.path:
            self.path = os.getcwd()
        filepath = os.path.join(self.path, self.filename)
        if os.path.exists(filepath):
            # remove the old file and restart the download
            os.remove(filepath)
        f = open(filepath, "wb")
        c.setopt(pycurl.WRITEDATA, f)
        self._dst_path = filepath
        # add cookies, if available
        if self.cookies:
            c.setopt(pycurl.COOKIE, self.cookies)
        # download file
        try:
            c.perform()
        except pycurl.error as error:
            errno, errstr = error.args
            print("curl error: %s" % errstr)
            self._errors.append(errstr)
            self.stop = True
            self.progress["stopped"] = True
        finally:
            code = c.getinfo(c.RESPONSE_CODE)
            c.close()
            f.close()
            self._is_finished = True
            if self.progress["percent"] < 100:
                self._is_successful = False
            elif self._HashFunc is not None:
                hashed = self.hashlib_hash(self._HashFunc, self._dst_path)
                if hashed == self._HashValue:
                    self._is_successful = True
                else:
                    self._is_successful = False
                    self._errors.append("hash failed")
            elif code != 200:
                self._is_successful = False
                os.remove(self._dst_path)  # clear garbage file
                self._errors.append("response error %d" % code)
            else:
                self._is_successful = True  # 100% downloaded without hash check
    def getProgress(self, download_t, download_d, upload_t, upload_d):
        # pycurl XFERINFOFUNCTION callback: update the shared progress dict.
        # Returning a non-zero value tells libcurl to abort the transfer.
        if download_t and download_d:
            self.progress['downloaded'] = download_d + self.downloaded
            self.progress['total'] = download_t + self.downloaded
            self.progress['percent'] = (float(self.progress['downloaded']) /
                                        float(self.progress['total'])) * 100.0
            self.progress["stopped"] = False
        if self.stop:
            self.progress["stopped"] = True
            return 1

    def hashlib_hash(self, method, fname):
        # method is a hashlib object, e.g. hashlib.md5()
        hash_ = method
        with open(fname, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_.update(chunk)
        return hash_.hexdigest()
    def add_hash_verification(self, method_name, method_value):
        # only md5 is supported; anything else disables the check
        if method_name == "md5":
            self._HashFunc = hashlib.md5()
        else:
            self._HashFunc = None
        self._HashValue = method_value

    def get_progress(self):
        return self.progress["percent"]

    def cancel(self):
        # sets the flag checked by getProgress, which aborts the transfer
        self.stop = True
def main():
    from optparse import OptionParser
    parser = OptionParser(usage="%prog [options] <url>")
    parser.add_option("-p", "--path", default=False, dest="path",
                      help="download file to PATH", metavar="PATH")
    parser.add_option("-c", "--cookies", default=False, dest="cookies",
                      help="specify cookie(s)", metavar="COOKIES")
    opts, args = parser.parse_args()
    if len(args) == 0:
        parser.error("No url supplied")
    for url in args:
        print("Downloading: %s" % url)
        if opts.path:
            print("to: %s" % opts.path)
        else:
            print("to current directory")
        d = Download(url, opts.path, opts.cookies)
        d.start()
        last_downloaded = 0
        sleep_time = 0.05
        while True:
            try:
                progress = d.progress['percent']
                download_dx = d.progress["downloaded"] - last_downloaded
                speed = float(download_dx) / (sleep_time * 1000.0)
                last_downloaded = d.progress["downloaded"]
                if d.progress["stopped"]:
                    break
                print("%.2f percent | %d of %d | %.1f KB/s" % (progress, d.progress['downloaded'], d.progress['total'], speed))
                if progress == 100:
                    print("")
                    print("Download complete: %s" % d.filename)
                    break
                time.sleep(sleep_time)
            # tell thread to terminate on keyboard interrupt,
            # otherwise the process has to be killed manually
            except KeyboardInterrupt:
                d.cancel()
                break
            except:
                raise


if __name__ == "__main__":
    main()
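

# Usage sketch (illustrative only): driving Download from other code with an
# MD5 check via add_hash_verification. The URL, destination directory, and
# checksum below are placeholder values assumed for the example.
#
#     d = Download("http://example.com/file.iso", "/tmp")
#     d.add_hash_verification("md5", "<expected md5 hexdigest>")
#     d.start()
#     d.join()
#     if d.isSuccessful():
#         print("saved to %s" % d.get_dest())
#     else:
#         print("errors: %s" % d.get_errors())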