This could help you. If my understanding is correct, you have a bunch of zip files you would like to download, and you want to check the current download status as you go. You could of course throttle these print statements — for example, to one every 25 minutes, or one per every X MB downloaded.
.
import requests
# Demo download targets.
# NOTE(review): the 4th entry uses a misspelled host ("file-examplesc.com") and a
# bogus ".zipdd" suffix — it looks intentionally broken to exercise the failure
# path in download_url_list; confirm before "fixing" it.
url_list = ["http://file-examples.com/wp-content/uploads/2017/02/zip_10MB.zip", "http://file-examples.com/wp-content/uploads/2017/02/zip_10MB.zip", "http://file-examples.com/wp-content/uploads/2017/02/zip_10MB.zip", "http://file-examplesc.com/wp-content/uploads/2017/02/zip_10MB.zipdd", "http://file-examples.com/wp-content/uploads/2017/02/zip_10MB.zip"]
def download_file(url, total_download_mb):
    """Stream-download *url* into the current directory, printing progress.

    The local filename is the last path segment of the URL.

    Parameters:
        url: direct link to the file to download.
        total_download_mb: running total (in MB) downloaded so far across calls.

    Returns:
        The updated running total in MB (input total plus this file's size).

    Raises:
        requests.HTTPError: if the server answers with a 4xx/5xx status.
        requests.RequestException: on connection errors or timeouts.
    """
    bytes_per_mb = 1024 * 1024
    local_filename = url.split('/')[-1]
    # timeout guards against a stalled connection hanging the whole batch
    with requests.get(url, stream=True, timeout=30) as r:
        # Fail loudly on 404 etc. -- without this, a bad path on a resolving
        # host would silently save the HTML error page and count as success.
        r.raise_for_status()
        # Content-Length may be absent (e.g. chunked transfer); fall back to 0
        # instead of raising KeyError, so the progress line still prints.
        filesize = int(r.headers.get("Content-Length", 0)) / bytes_per_mb
        downloaded = 0
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                if chunk:
                    f.write(chunk)
                    downloaded += len(chunk)
                    print("%.2fmb / %.2fmb downloaded." % (downloaded / bytes_per_mb, filesize))
    # Computed from the byte counter so an empty body still works.
    total_download_mb += downloaded / bytes_per_mb
    # download is finished; the archive could be unpacked here ...
    return total_download_mb
def download_url_list(url_list):
    """Download every URL in *url_list*, tracking a running total and failures.

    Failed downloads are collected and reported at the end rather than
    aborting the whole batch.

    Parameters:
        url_list: iterable of direct download links.
    """
    total_download_mb = 0
    failed_urls = []
    for i, url in enumerate(url_list):
        try:
            total_download_mb = download_file(url, total_download_mb)
            print("Total download: %.2fmb" % total_download_mb)
        except Exception as e:
            # Boundary handler: log the actual error and keep going with the
            # remaining URLs. (A bare `except:` here would also swallow
            # KeyboardInterrupt, making the batch impossible to cancel.)
            failed_urls.append(url)
            print("failed by file:" + str(i))
            print(e)
    print("failed downloads")
    print(failed_urls)
if __name__ == "__main__":
    # Run the demo batch only when executed as a script, so importing this
    # module does not trigger network downloads as a side effect.
    download_url_list(url_list)