diff --git a/11c32a684b20b92f800d3ffa670dc8773e22bb92.patch b/11c32a684b20b92f800d3ffa670dc8773e22bb92.patch
new file mode 100644
index 0000000..69479e2
--- /dev/null
+++ b/11c32a684b20b92f800d3ffa670dc8773e22bb92.patch
@@ -0,0 +1,129 @@
+From 11c32a684b20b92f800d3ffa670dc8773e22bb92 Mon Sep 17 00:00:00 2001
+From: Chris Lalancette
+Date: Thu, 2 Mar 2017 18:52:06 -0500
+Subject: [PATCH] Support file:// URLs again.
+
+The switch to requests broke support for file:// URLs.
+Re-implement it here by implementing our own Adapter for
+requests which allows file:// to work again. While in
+here, I also fixed another problem having to do with
+how things are printed out during the download; this
+should look better now.
+
+Signed-off-by: Chris Lalancette
+---
+ oz/ozutil.py | 76 ++++++++++++++++++++++++++++++++++++++++++++++++++++--------
+ 1 file changed, 66 insertions(+), 10 deletions(-)
+
+diff --git a/oz/ozutil.py b/oz/ozutil.py
+index 2523ea7..09cd124 100644
+--- a/oz/ozutil.py
++++ b/oz/ozutil.py
+@@ -30,6 +30,7 @@
+ import time
+ import select
+ import contextlib
++import urllib
+ try:
+     import configparser
+ except ImportError:
+@@ -744,6 +745,57 @@ def default_screenshot_dir():
+     """
+     return os.path.join(default_data_dir(), "screenshots")
+
++class LocalFileAdapter(requests.adapters.BaseAdapter):
++    @staticmethod
++    def _chkpath(method, path):
++        """Return an HTTP status for the given filesystem path."""
++        if method.lower() in ('put', 'delete'):
++            return 501, "Not Implemented"  # TODO
++        elif method.lower() not in ('get', 'head', 'post'):
++            return 405, "Method Not Allowed"
++        elif os.path.isdir(path):
++            return 400, "Path Not A File"
++        elif not os.path.isfile(path):
++            return 404, "File Not Found"
++        elif not os.access(path, os.R_OK):
++            return 403, "Access Denied"
++        else:
++            return 200, "OK"
++
++    def send(self, req, **kwargs):  # pylint: disable=unused-argument
++        """Return the file specified by the given request
++
++        @type req: C{PreparedRequest}
++        @todo: Should I bother filling `response.headers` and processing
++               If-Modified-Since and friends using `os.stat`?
++        """
++        path = os.path.normcase(os.path.normpath(urllib.url2pathname(req.path_url)))
++        response = requests.Response()
++
++        response.status_code, response.reason = self._chkpath(req.method, path)
++        if response.status_code == 200 and req.method.lower() != 'head':
++            try:
++                response.raw = open(path, 'rb')
++            except (OSError, IOError), err:
++                response.status_code = 500
++                response.reason = str(err)
++
++        if isinstance(req.url, bytes):
++            response.url = req.url.decode('utf-8')
++        else:
++            response.url = req.url
++
++        response.headers['Content-Length'] = os.path.getsize(path)
++        response.headers['Accept-Ranges'] = 'bytes'
++        response.headers['Redirect-URL'] = req.url
++        response.request = req
++        response.connection = self
++
++        return response
++
++    def close(self):
++        pass
++
+ def http_get_header(url, redirect=True):
+     """
+     Function to get the HTTP headers from a URL. The available headers will be
+@@ -755,11 +807,13 @@ def http_get_header(url, redirect=True):
+     'Redirect-URL' will always be None in the redirect=True case, and may be
+     None in the redirect=True case if no redirects were required.
+ """ +- with contextlib.closing(requests.post(url, allow_redirects=redirect, stream=True, timeout=10)) as r: +- info = r.headers +- info['HTTP-Code'] = r.status_code ++ with requests.Session() as requests_session: ++ requests_session.mount('file://', LocalFileAdapter()) ++ response = requests_session.post(url, allow_redirects=redirect, stream=True, timeout=10) ++ info = response.headers ++ info['HTTP-Code'] = response.status_code + if not redirect: +- info['Redirect-URL'] = r.headers.get('Location') ++ info['Redirect-URL'] = response.headers.get('Location') + else: + info['Redirect-URL'] = None + +@@ -769,15 +823,17 @@ def http_download_file(url, fd, show_progress, logger): + """ + Function to download a file from url to file descriptor fd. + """ +- with contextlib.closing(requests.get(url, stream=True, allow_redirects=True)) as r: +- file_size = int(r.headers.get('Content-Length')) ++ with requests.Session() as requests_session: ++ requests_session.mount('file://', LocalFileAdapter()) ++ response = requests_session.get(url, stream=True, allow_redirects=True) ++ file_size = int(response.headers.get('Content-Length')) + chunk_size = 10*1024*1024 +- i = 0 +- for chunk in r.iter_content(chunk_size): +- i = i + 1 ++ done = 0 ++ for chunk in response.iter_content(chunk_size): + write_bytes_to_fd(fd, chunk) ++ done += len(chunk) + if show_progress: +- logger.debug("%dkB of %dkB" % ((i * chunk_size) / 1024, file_size / 1024)) ++ logger.debug("%dkB of %dkB" % (done / 1024, file_size / 1024)) + + def ftp_download_directory(server, username, password, basepath, destination): + """ diff --git a/32a4edd3124e7acb2dac02ff77aeea944313cbc2.patch b/32a4edd3124e7acb2dac02ff77aeea944313cbc2.patch new file mode 100644 index 0000000..933eb4c --- /dev/null +++ b/32a4edd3124e7acb2dac02ff77aeea944313cbc2.patch @@ -0,0 +1,146 @@ +From 32a4edd3124e7acb2dac02ff77aeea944313cbc2 Mon Sep 17 00:00:00 2001 +From: Patrick Uiterwijk +Date: Sat, 28 Jan 2017 21:33:13 +0000 +Subject: [PATCH] Replace pycurl with requests + +Signed-off-by: Patrick Uiterwijk +--- + oz/ozutil.py | 102 +++++++++++-------------------------------------------- + requirements.txt | 2 +- + 4 files changed, 22 insertions(+), 86 deletions(-) + +diff --git a/oz/ozutil.py b/oz/ozutil.py +index ade670f..2523ea7 100644 +--- a/oz/ozutil.py ++++ b/oz/ozutil.py +@@ -1,5 +1,5 @@ + # Copyright (C) 2010,2011 Chris Lalancette +-# Copyright (C) 2012-2016 Chris Lalancette ++# Copyright (C) 2012-2017 Chris Lalancette + + # This library is free software; you can redistribute it and/or + # modify it under the terms of the GNU Lesser General Public +@@ -25,10 +25,11 @@ + import errno + import stat + import shutil +-import pycurl ++import requests + import gzip + import time + import select ++import contextlib + try: + import configparser + except ImportError: +@@ -754,48 +755,13 @@ def http_get_header(url, redirect=True): + 'Redirect-URL' will always be None in the redirect=True case, and may be + None in the redirect=True case if no redirects were required. + """ +- info = {} +- def _header(buf): +- """ +- Internal function that is called back from pycurl perform() for +- header data. +- """ +- buf = buf.strip() +- if len(buf) == 0: +- return +- +- split = buf.split(':') +- if len(split) < 2: +- # not a valid header; skip +- return +- key = split[0].strip() +- value = split[1].strip() +- info[key] = value +- +- def _data(buf): +- """ +- Empty function that is called back from pycurl perform() for body data. 
+- """ +- pass +- +- c = pycurl.Curl() +- c.setopt(c.URL, url) +- c.setopt(c.NOBODY, True) +- c.setopt(c.HEADERFUNCTION, _header) +- c.setopt(c.HEADER, True) +- c.setopt(c.WRITEFUNCTION, _data) +- if redirect: +- c.setopt(c.FOLLOWLOCATION, True) +- c.perform() +- info['HTTP-Code'] = c.getinfo(c.HTTP_CODE) +- if info['HTTP-Code'] == 0: +- # if this was a file:/// URL, then the HTTP_CODE returned 0. +- # set it to 200 to be compatible with http +- info['HTTP-Code'] = 200 +- if not redirect: +- info['Redirect-URL'] = c.getinfo(c.REDIRECT_URL) +- +- c.close() ++ with contextlib.closing(requests.post(url, allow_redirects=redirect, stream=True, timeout=10)) as r: ++ info = r.headers ++ info['HTTP-Code'] = r.status_code ++ if not redirect: ++ info['Redirect-URL'] = r.headers.get('Location') ++ else: ++ info['Redirect-URL'] = None + + return info + +@@ -803,45 +769,15 @@ def http_download_file(url, fd, show_progress, logger): + """ + Function to download a file from url to file descriptor fd. + """ +- class Progress(object): +- """ +- Internal class to represent progress on the connection. This is only +- required so that we have somewhere to store the "last_mb" variable +- that is not global. +- """ +- def __init__(self): +- self.last_mb = -1 +- +- def progress(self, down_total, down_current, up_total, up_current): +- """ +- Function that is called back from the pycurl perform() method to +- update the progress information. +- """ +- if down_total == 0: +- return +- current_mb = int(down_current) / 10485760 +- if current_mb > self.last_mb or down_current == down_total: +- self.last_mb = current_mb +- logger.debug("%dkB of %dkB" % (down_current/1024, down_total/1024)) +- +- def _data(buf): +- """ +- Function that is called back from the pycurl perform() method to +- actually write data to disk. +- """ +- write_bytes_to_fd(fd, buf) +- +- progress = Progress() +- c = pycurl.Curl() +- c.setopt(c.URL, url) +- c.setopt(c.CONNECTTIMEOUT, 5) +- c.setopt(c.WRITEFUNCTION, _data) +- c.setopt(c.FOLLOWLOCATION, 1) +- if show_progress: +- c.setopt(c.NOPROGRESS, 0) +- c.setopt(c.PROGRESSFUNCTION, progress.progress) +- c.perform() +- c.close() ++ with contextlib.closing(requests.get(url, stream=True, allow_redirects=True)) as r: ++ file_size = int(r.headers.get('Content-Length')) ++ chunk_size = 10*1024*1024 ++ i = 0 ++ for chunk in r.iter_content(chunk_size): ++ i = i + 1 ++ write_bytes_to_fd(fd, chunk) ++ if show_progress: ++ logger.debug("%dkB of %dkB" % ((i * chunk_size) / 1024, file_size / 1024)) + + def ftp_download_directory(server, username, password, basepath, destination): + """