test_cert_reload fails with SSLError on macOS and Linux
The test_cert_reload unit test for this package is failing for me on macOS (Catalina, see below) and Linux (Azure Pipelines) with the same error:
requests.exceptions.SSLError: CertReloadingHTTPSConnectionPool(host='localhost', port=56826): Max retries exceeded with url: / (Caused by SSLError(SSLError("bad handshake: Error([('SSL routines', 'tls_process_server_certificate', 'certificate verify failed')])")))
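For reference, here is a minimal sketch of what the client side of the test does, assuming local copies of the certificates that the test fixture writes into its pytest tmp directory; the file names and the port number are taken from the traceback below, but the paths and port are ephemeral, so treat them as placeholders:

```python
# Minimal reproduction sketch (assumptions: local copies of the fixture-generated
# certificates and the ephemeral port reported by pytest-httpserver).
import requests

SERVER_CA = "server_cert.pem"        # bundle used to verify the test server
CLIENT_CERT = ("client_cert.pem",    # client certificate presented during the handshake
               "client_key.pem")     # matching private key

# The failure happens during the TLS handshake, before any HTTP exchange,
# so the same SSLError should be reproducible with plain requests.
requests.get("https://localhost:56826/", verify=SERVER_CA, cert=CLIENT_CERT)
```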
Full traceback
$ git rev-parse HEAD
2794d3ba37f87f4275ac66fd55a7a2611e1c2d9e
$ python setup.py test --addopts "-k test_cert_reload"
running pytest
running egg_info
writing ligo_requests.egg-info/PKG-INFO
writing dependency_links to ligo_requests.egg-info/dependency_links.txt
writing namespace_packages to ligo_requests.egg-info/namespace_packages.txt
writing requirements to ligo_requests.egg-info/requires.txt
writing top-level names to ligo_requests.egg-info/top_level.txt
reading manifest file 'ligo_requests.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
writing manifest file 'ligo_requests.egg-info/SOURCES.txt'
running build_ext
======================================= test session starts ========================================
platform darwin -- Python 3.8.0, pytest-5.3.2, py-1.8.1, pluggy-0.13.1
rootdir: /Users/duncan/git/ligo-requests
plugins: socket-0.3.3, httpserver-0.3.4, freezegun-0.3.0.post1
collected 16 items / 15 deselected / 1 selected
ligo/requests/tests/test_cert_reload.py F [100%]
============================================= FAILURES =============================================
_________________________________________ test_cert_reload _________________________________________
self = <urllib3.contrib.pyopenssl.PyOpenSSLContext object at 0x105cec670>
sock = <socket.socket fd=15, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('127.0.0.1', 56828), raddr=('127.0.0.1', 56826)>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = b'localhost'
def wrap_socket(
self,
sock,
server_side=False,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
server_hostname=None,
):
cnx = OpenSSL.SSL.Connection(self._ctx, sock)
if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
server_hostname = server_hostname.encode("utf-8")
if server_hostname is not None:
cnx.set_tlsext_host_name(server_hostname)
cnx.set_connect_state()
while True:
try:
> cnx.do_handshake()
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/urllib3/contrib/pyopenssl.py:485:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <OpenSSL.SSL.Connection object at 0x105cecf70>
def do_handshake(self):
"""
Perform an SSL handshake (usually called after :meth:`renegotiate` or
one of :meth:`set_accept_state` or :meth:`set_connect_state`). This can
raise the same exceptions as :meth:`send` and :meth:`recv`.
:return: None.
"""
result = _lib.SSL_do_handshake(self._ssl)
> self._raise_ssl_error(self._ssl, result)
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/OpenSSL/SSL.py:1934:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <OpenSSL.SSL.Connection object at 0x105cecf70>, ssl = <cdata 'SSL *' 0x7f942b952c00>
result = -1
def _raise_ssl_error(self, ssl, result):
if self._context._verify_helper is not None:
self._context._verify_helper.raise_if_problem()
if self._context._npn_advertise_helper is not None:
self._context._npn_advertise_helper.raise_if_problem()
if self._context._npn_select_helper is not None:
self._context._npn_select_helper.raise_if_problem()
if self._context._alpn_select_helper is not None:
self._context._alpn_select_helper.raise_if_problem()
if self._context._ocsp_helper is not None:
self._context._ocsp_helper.raise_if_problem()
error = _lib.SSL_get_error(ssl, result)
if error == _lib.SSL_ERROR_WANT_READ:
raise WantReadError()
elif error == _lib.SSL_ERROR_WANT_WRITE:
raise WantWriteError()
elif error == _lib.SSL_ERROR_ZERO_RETURN:
raise ZeroReturnError()
elif error == _lib.SSL_ERROR_WANT_X509_LOOKUP:
# TODO: This is untested.
raise WantX509LookupError()
elif error == _lib.SSL_ERROR_SYSCALL:
if _lib.ERR_peek_error() == 0:
if result < 0:
if platform == "win32":
errno = _ffi.getwinerror()[0]
else:
errno = _ffi.errno
if errno != 0:
raise SysCallError(errno, errorcode.get(errno))
raise SysCallError(-1, "Unexpected EOF")
else:
# TODO: This is untested.
_raise_current_error()
elif error == _lib.SSL_ERROR_NONE:
pass
else:
> _raise_current_error()
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/OpenSSL/SSL.py:1671:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
exception_type = <class 'OpenSSL.SSL.Error'>
def exception_from_error_queue(exception_type):
"""
Convert an OpenSSL library failure into a Python exception.
When a call to the native OpenSSL library fails, this is usually signalled
by the return value, and an error code is stored in an error queue
associated with the current thread. The err library provides functions to
obtain these error codes and textual error messages.
"""
errors = []
while True:
error = lib.ERR_get_error()
if error == 0:
break
errors.append((
text(lib.ERR_lib_error_string(error)),
text(lib.ERR_func_error_string(error)),
text(lib.ERR_reason_error_string(error))))
> raise exception_type(errors)
E OpenSSL.SSL.Error: [('SSL routines', 'tls_process_server_certificate', 'certificate verify failed')]
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/OpenSSL/_util.py:54: Error
During handling of the above exception, another exception occurred:
self = <ligo.requests.cert_reload.CertReloadingHTTPSConnectionPool object at 0x105cda280>
method = 'GET', url = '/', body = None
headers = {'User-Agent': 'ligo.requests/0.0.1+1.g2794d3b', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None), redirect = False
assert_same_host = False, timeout = <urllib3.util.timeout.Timeout object at 0x105cda340>
pool_timeout = None, release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}, conn = None
release_this_conn = True, err = None, clean_exit = False
timeout_obj = <urllib3.util.timeout.Timeout object at 0x105cec340>, is_new_proxy_conn = False
def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=None,
redirect=True,
assert_same_host=True,
timeout=_Default,
pool_timeout=None,
release_conn=None,
chunked=False,
body_pos=None,
**response_kw
):
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param body:
Data to send in the request body (useful for creating
POST requests, see HTTPConnectionPool.post_url for
more convenience).
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
Pass ``None`` to retry until you receive a response. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When False, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
:param \\**response_kw:
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = response_kw.get("preload_content", True)
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = six.ensure_str(_encode_target(url))
else:
url = six.ensure_str(parse_url(url).url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
# Merge the proxy headers. Only do this in HTTP. We have to copy the
# headers dict so we can safely change it without those changes being
# reflected in anyone else's copy.
if self.scheme == "http":
headers = headers.copy()
headers.update(self.proxy_headers)
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout
is_new_proxy_conn = self.proxy is not None and not getattr(
conn, "sock", None
)
if is_new_proxy_conn:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
> httplib_response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
)
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/urllib3/connectionpool.py:665:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <ligo.requests.cert_reload.CertReloadingHTTPSConnectionPool object at 0x105cda280>
conn = <ligo.requests.cert_reload.CertReloadingHTTPSConnection object at 0x105cec4c0>
method = 'GET', url = '/', timeout = <urllib3.util.timeout.Timeout object at 0x105cec340>
chunked = False
httplib_request_kw = {'body': None, 'headers': {'User-Agent': 'ligo.requests/0.0.1+1.g2794d3b', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}}
timeout_obj = <urllib3.util.timeout.Timeout object at 0x105cec640>
def _make_request(
self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
):
"""
Perform a request on a given urllib connection object taken from our
pool.
:param conn:
a connection from one of our connection pools
:param timeout:
Socket timeout in seconds for the request. This can be a
float or integer, which will set the same timeout value for
the socket connect and the socket read, or an instance of
:class:`urllib3.util.Timeout`, which gives you more fine-grained
control over your timeouts.
"""
self.num_requests += 1
timeout_obj = self._get_timeout(timeout)
timeout_obj.start_connect()
conn.timeout = timeout_obj.connect_timeout
# Trigger any extra validation we need to do.
try:
> self._validate_conn(conn)
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/urllib3/connectionpool.py:376:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <ligo.requests.cert_reload.CertReloadingHTTPSConnectionPool object at 0x105cda280>
conn = <ligo.requests.cert_reload.CertReloadingHTTPSConnection object at 0x105cec4c0>
def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
super(HTTPSConnectionPool, self)._validate_conn(conn)
# Force connect early to allow us to validate the connection.
if not getattr(conn, "sock", None): # AppEngine might not have `.sock`
> conn.connect()
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/urllib3/connectionpool.py:994:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <ligo.requests.cert_reload.CertReloadingHTTPSConnection object at 0x105cec4c0>
def connect(self):
if self.cert_file:
cert = load_certificate(self.cert_file)
self._not_valid_after = cert.not_valid_after
> super(CertReloadingHTTPSConnection, self).connect()
ligo/requests/cert_reload.py:37:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <ligo.requests.cert_reload.CertReloadingHTTPSConnection object at 0x105cec4c0>
def connect(self):
# Add certificate verification
conn = self._new_conn()
hostname = self.host
# Google App Engine's httplib does not define _tunnel_host
if getattr(self, "_tunnel_host", None):
self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
# Override the host with the one we're requesting data from.
hostname = self._tunnel_host
server_hostname = hostname
if self.server_hostname is not None:
server_hostname = self.server_hostname
is_time_off = datetime.date.today() < RECENT_DATE
if is_time_off:
warnings.warn(
(
"System time is way off (before {0}). This will probably "
"lead to SSL verification errors"
).format(RECENT_DATE),
SystemTimeWarning,
)
# Wrap socket using verification with the root certs in
# trusted_root_certs
default_ssl_context = False
if self.ssl_context is None:
default_ssl_context = True
self.ssl_context = create_urllib3_context(
ssl_version=resolve_ssl_version(self.ssl_version),
cert_reqs=resolve_cert_reqs(self.cert_reqs),
)
context = self.ssl_context
context.verify_mode = resolve_cert_reqs(self.cert_reqs)
# Try to load OS default certs if none are given.
# Works well on Windows (requires Python3.4+)
if (
not self.ca_certs
and not self.ca_cert_dir
and default_ssl_context
and hasattr(context, "load_default_certs")
):
context.load_default_certs()
> self.sock = ssl_wrap_socket(
sock=conn,
keyfile=self.key_file,
certfile=self.cert_file,
key_password=self.key_password,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
server_hostname=server_hostname,
ssl_context=context,
)
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/urllib3/connection.py:386:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
sock = <socket.socket fd=15, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('127.0.0.1', 56828), raddr=('127.0.0.1', 56826)>
keyfile = '/private/var/folders/xh/jdrqg2bx3s5f4lkq0rf2903c0000gq/T/pytest-of-duncan/pytest-4/test_cert_reload0/client_key.pem'
certfile = '/private/var/folders/xh/jdrqg2bx3s5f4lkq0rf2903c0000gq/T/pytest-of-duncan/pytest-4/test_cert_reload0/client_cert.pem'
cert_reqs = None
ca_certs = '/private/var/folders/xh/jdrqg2bx3s5f4lkq0rf2903c0000gq/T/pytest-of-duncan/pytest-4/test_cert_reload0/server_cert.pem'
server_hostname = 'localhost', ssl_version = None, ciphers = None
ssl_context = <urllib3.contrib.pyopenssl.PyOpenSSLContext object at 0x105cec670>, ca_cert_dir = None
key_password = None
def ssl_wrap_socket(
sock,
keyfile=None,
certfile=None,
cert_reqs=None,
ca_certs=None,
server_hostname=None,
ssl_version=None,
ciphers=None,
ssl_context=None,
ca_cert_dir=None,
key_password=None,
):
"""
All arguments except for server_hostname, ssl_context, and ca_cert_dir have
the same meaning as they do when using :func:`ssl.wrap_socket`.
:param server_hostname:
When SNI is supported, the expected hostname of the certificate
:param ssl_context:
A pre-made :class:`SSLContext` object. If none is provided, one will
be created using :func:`create_urllib3_context`.
:param ciphers:
A string of ciphers we wish the client to support.
:param ca_cert_dir:
A directory containing CA certificates in multiple separate files, as
supported by OpenSSL's -CApath flag or the capath argument to
SSLContext.load_verify_locations().
:param key_password:
Optional password if the keyfile is encrypted.
"""
context = ssl_context
if context is None:
# Note: This branch of code and all the variables in it are no longer
# used by urllib3 itself. We should consider deprecating and removing
# this code.
context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
if ca_certs or ca_cert_dir:
try:
context.load_verify_locations(ca_certs, ca_cert_dir)
except IOError as e: # Platform-specific: Python 2.7
raise SSLError(e)
# Py33 raises FileNotFoundError which subclasses OSError
# These are not equivalent unless we check the errno attribute
except OSError as e: # Platform-specific: Python 3.3 and beyond
if e.errno == errno.ENOENT:
raise SSLError(e)
raise
elif ssl_context is None and hasattr(context, "load_default_certs"):
# try to load OS default certs; works well on Windows (require Python3.4+)
context.load_default_certs()
# Attempt to detect if we get the goofy behavior of the
# keyfile being encrypted and OpenSSL asking for the
# passphrase via the terminal and instead error out.
if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
raise SSLError("Client private key is encrypted, password is required")
if certfile:
if key_password is None:
context.load_cert_chain(certfile, keyfile)
else:
context.load_cert_chain(certfile, keyfile, key_password)
# If we detect server_hostname is an IP address then the SNI
# extension should not be used according to RFC3546 Section 3.1
# We shouldn't warn the user if SNI isn't available but we would
# not be using SNI anyways due to IP address for server_hostname.
if (
server_hostname is not None and not is_ipaddress(server_hostname)
) or IS_SECURETRANSPORT:
if HAS_SNI and server_hostname is not None:
> return context.wrap_socket(sock, server_hostname=server_hostname)
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/urllib3/util/ssl_.py:370:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <urllib3.contrib.pyopenssl.PyOpenSSLContext object at 0x105cec670>
sock = <socket.socket fd=15, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('127.0.0.1', 56828), raddr=('127.0.0.1', 56826)>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = b'localhost'
def wrap_socket(
self,
sock,
server_side=False,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
server_hostname=None,
):
cnx = OpenSSL.SSL.Connection(self._ctx, sock)
if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
server_hostname = server_hostname.encode("utf-8")
if server_hostname is not None:
cnx.set_tlsext_host_name(server_hostname)
cnx.set_connect_state()
while True:
try:
cnx.do_handshake()
except OpenSSL.SSL.WantReadError:
if not util.wait_for_read(sock, sock.gettimeout()):
raise timeout("select timed out")
continue
except OpenSSL.SSL.Error as e:
> raise ssl.SSLError("bad handshake: %r" % e)
E ssl.SSLError: ("bad handshake: Error([('SSL routines', 'tls_process_server_certificate', 'certificate verify failed')])",)
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/urllib3/contrib/pyopenssl.py:491: SSLError
During handling of the above exception, another exception occurred:
self = <ligo.requests.cert_reload.CertReloadingHTTPAdapter object at 0x105b97a90>
request = <PreparedRequest [GET]>, stream = False
timeout = <urllib3.util.timeout.Timeout object at 0x105cda340>
verify = '/private/var/folders/xh/jdrqg2bx3s5f4lkq0rf2903c0000gq/T/pytest-of-duncan/pytest-4/test_cert_reload0/server_cert.pem'
cert = ('/private/var/folders/xh/jdrqg2bx3s5f4lkq0rf2903c0000gq/T/pytest-of-duncan/pytest-4/test_cert_reload0/client_cert.pem', '/private/var/folders/xh/jdrqg2bx3s5f4lkq0rf2903c0000gq/T/pytest-of-duncan/pytest-4/test_cert_reload0/client_key.pem')
proxies = OrderedDict()
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection(request.url, proxies)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
chunked = not (request.body is None or 'Content-Length' in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError as e:
# this may raise a string formatting error.
err = ("Invalid timeout {}. Pass a (connect, read) "
"timeout tuple, or a single float to set "
"both timeouts to the same value".format(timeout))
raise ValueError(err)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
> resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout
)
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/requests/adapters.py:439:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <ligo.requests.cert_reload.CertReloadingHTTPSConnectionPool object at 0x105cda280>
method = 'GET', url = '/', body = None
headers = {'User-Agent': 'ligo.requests/0.0.1+1.g2794d3b', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None), redirect = False
assert_same_host = False, timeout = <urllib3.util.timeout.Timeout object at 0x105cda340>
pool_timeout = None, release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}, conn = None
release_this_conn = True, err = None, clean_exit = False
timeout_obj = <urllib3.util.timeout.Timeout object at 0x105cec340>, is_new_proxy_conn = False
def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=None,
redirect=True,
assert_same_host=True,
timeout=_Default,
pool_timeout=None,
release_conn=None,
chunked=False,
body_pos=None,
**response_kw
):
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param body:
Data to send in the request body (useful for creating
POST requests, see HTTPConnectionPool.post_url for
more convenience).
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
Pass ``None`` to retry until you receive a response. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When False, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
:param \\**response_kw:
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = response_kw.get("preload_content", True)
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = six.ensure_str(_encode_target(url))
else:
url = six.ensure_str(parse_url(url).url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
# Merge the proxy headers. Only do this in HTTP. We have to copy the
# headers dict so we can safely change it without those changes being
# reflected in anyone else's copy.
if self.scheme == "http":
headers = headers.copy()
headers.update(self.proxy_headers)
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout
is_new_proxy_conn = self.proxy is not None and not getattr(
conn, "sock", None
)
if is_new_proxy_conn:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
httplib_response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
)
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Pass method to Response for length checking
response_kw["request_method"] = method
# Import httplib's response into our own wrapper object
response = self.ResponseCls.from_httplib(
httplib_response,
pool=self,
connection=response_conn,
retries=retries,
**response_kw
)
# Everything went great!
clean_exit = True
except queue.Empty:
# Timed out by queue.
raise EmptyPoolError(self, "No pool connections are available.")
except (
TimeoutError,
HTTPException,
SocketError,
ProtocolError,
BaseSSLError,
SSLError,
CertificateError,
) as e:
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
if isinstance(e, (BaseSSLError, CertificateError)):
e = SSLError(e)
elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
e = ProxyError("Cannot connect to proxy.", e)
elif isinstance(e, (SocketError, HTTPException)):
e = ProtocolError("Connection aborted.", e)
> retries = retries.increment(
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
)
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/urllib3/connectionpool.py:719:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = Retry(total=0, connect=None, read=False, redirect=None, status=None), method = 'GET'
url = '/', response = None
error = SSLError(SSLError("bad handshake: Error([('SSL routines', 'tls_process_server_certificate', 'certificate verify failed')])"))
_pool = <ligo.requests.cert_reload.CertReloadingHTTPSConnectionPool object at 0x105cda280>
_stacktrace = <traceback object at 0x105d32400>
def increment(
self,
method=None,
url=None,
response=None,
error=None,
_pool=None,
_stacktrace=None,
):
""" Return a new Retry object with incremented retry counters.
:param response: A response object, or None, if the server did not
return a response.
:type response: :class:`~urllib3.response.HTTPResponse`
:param Exception error: An error encountered during the request, or
None if the response was received successfully.
:return: A new ``Retry`` object.
"""
if self.total is False and error:
# Disabled, indicate to re-raise the error.
raise six.reraise(type(error), error, _stacktrace)
total = self.total
if total is not None:
total -= 1
connect = self.connect
read = self.read
redirect = self.redirect
status_count = self.status
cause = "unknown"
status = None
redirect_location = None
if error and self._is_connection_error(error):
# Connect retry?
if connect is False:
raise six.reraise(type(error), error, _stacktrace)
elif connect is not None:
connect -= 1
elif error and self._is_read_error(error):
# Read retry?
if read is False or not self._is_method_retryable(method):
raise six.reraise(type(error), error, _stacktrace)
elif read is not None:
read -= 1
elif response and response.get_redirect_location():
# Redirect retry?
if redirect is not None:
redirect -= 1
cause = "too many redirects"
redirect_location = response.get_redirect_location()
status = response.status
else:
# Incrementing because of a server error like a 500 in
# status_forcelist and a the given method is in the whitelist
cause = ResponseError.GENERIC_ERROR
if response and response.status:
if status_count is not None:
status_count -= 1
cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
status = response.status
history = self.history + (
RequestHistory(method, url, error, status, redirect_location),
)
new_retry = self.new(
total=total,
connect=connect,
read=read,
redirect=redirect,
status=status_count,
history=history,
)
if new_retry.is_exhausted():
> raise MaxRetryError(_pool, url, error or ResponseError(cause))
E urllib3.exceptions.MaxRetryError: CertReloadingHTTPSConnectionPool(host='localhost', port=56826): Max retries exceeded with url: / (Caused by SSLError(SSLError("bad handshake: Error([('SSL routines', 'tls_process_server_certificate', 'certificate verify failed')])")))
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/urllib3/util/retry.py:436: MaxRetryError
During handling of the above exception, another exception occurred:
client = <ligo.requests.Session object at 0x105b97730>
server = <pytest_httpserver.httpserver.HTTPServer object at 0x105b4ad60>
freezer = <freezegun.api.FrozenDateTimeFactory object at 0x105b97820>
def test_cert_reload(client, server, freezer):
url = server.url_for('/')
# Test 1: significantly before expiration time, still valid
freezer.move_to('2019-01-02')
> assert client.get(url).json() == {'foo': 'bar'}
ligo/requests/tests/test_cert_reload.py:159:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/requests/sessions.py:546: in get
return self.request('GET', url, **kwargs)
ligo/requests/file.py:72: in request
return super(SessionFileMixin, self).request(
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/requests/sessions.py:533: in request
resp = self.send(prep, **send_kwargs)
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/requests/sessions.py:646: in send
r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <ligo.requests.cert_reload.CertReloadingHTTPAdapter object at 0x105b97a90>
request = <PreparedRequest [GET]>, stream = False
timeout = <urllib3.util.timeout.Timeout object at 0x105cda340>
verify = '/private/var/folders/xh/jdrqg2bx3s5f4lkq0rf2903c0000gq/T/pytest-of-duncan/pytest-4/test_cert_reload0/server_cert.pem'
cert = ('/private/var/folders/xh/jdrqg2bx3s5f4lkq0rf2903c0000gq/T/pytest-of-duncan/pytest-4/test_cert_reload0/client_cert.pem', '/private/var/folders/xh/jdrqg2bx3s5f4lkq0rf2903c0000gq/T/pytest-of-duncan/pytest-4/test_cert_reload0/client_key.pem')
proxies = OrderedDict()
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection(request.url, proxies)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
chunked = not (request.body is None or 'Content-Length' in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError as e:
# this may raise a string formatting error.
err = ("Invalid timeout {}. Pass a (connect, read) "
"timeout tuple, or a single float to set "
"both timeouts to the same value".format(timeout))
raise ValueError(err)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout
)
# Send the request.
else:
if hasattr(conn, 'proxy_pool'):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
try:
low_conn.putrequest(request.method,
url,
skip_accept_encoding=True)
for header, value in request.headers.items():
low_conn.putheader(header, value)
low_conn.endheaders()
for i in request.body:
low_conn.send(hex(len(i))[2:].encode('utf-8'))
low_conn.send(b'\r\n')
low_conn.send(i)
low_conn.send(b'\r\n')
low_conn.send(b'0\r\n\r\n')
# Receive the response from the server
try:
# For Python 2.7, use buffering of HTTP responses
r = low_conn.getresponse(buffering=True)
except TypeError:
# For compatibility with Python 3.3+
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False
)
except:
# If we hit any problems here, clean up the connection.
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
except (ProtocolError, socket.error) as err:
raise ConnectionError(err, request=request)
except MaxRetryError as e:
if isinstance(e.reason, ConnectTimeoutError):
# TODO: Remove this in 3.0.0: see #2811
if not isinstance(e.reason, NewConnectionError):
raise ConnectTimeout(e, request=request)
if isinstance(e.reason, ResponseError):
raise RetryError(e, request=request)
if isinstance(e.reason, _ProxyError):
raise ProxyError(e, request=request)
if isinstance(e.reason, _SSLError):
# This branch is for urllib3 v1.22 and later.
> raise SSLError(e, request=request)
E requests.exceptions.SSLError: CertReloadingHTTPSConnectionPool(host='localhost', port=56826): Max retries exceeded with url: / (Caused by SSLError(SSLError("bad handshake: Error([('SSL routines', 'tls_process_server_certificate', 'certificate verify failed')])")))
../../opt/miniconda3/envs/ligo-requests/lib/python3.8/site-packages/requests/adapters.py:514: SSLError
================================= 1 failed, 15 deselected in 1.16s =================================
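Since the test freezes time to 2019-01-02 before the first request and the handshake then fails in tls_process_server_certificate with "certificate verify failed", one thing worth checking is whether the fixture-generated server certificate has the validity window and subjectAltName the client expects. A small diagnostic sketch using the cryptography package already in the environment (the path to server_cert.pem is hypothetical, pointing at the file the fixture writes out):

```python
# Debugging sketch, not part of the test suite: dump the validity window and SAN
# of the fixture-generated server certificate to narrow down the verify failure.
from cryptography import x509
from cryptography.hazmat.backends import default_backend

with open("server_cert.pem", "rb") as f:
    cert = x509.load_pem_x509_certificate(f.read(), default_backend())

print("not_valid_before:", cert.not_valid_before)
print("not_valid_after: ", cert.not_valid_after)
print("subject:         ", cert.subject.rfc4514_string())
try:
    san = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName)
    print("SAN DNS names:   ", san.value.get_values_for_type(x509.DNSName))
except x509.ExtensionNotFound:
    print("SAN DNS names:    (none)")
```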
Conda package list
# packages in environment at /Users/duncan/opt/miniconda3/envs/ligo-requests:
#
# Name Version Build Channel
attrs 19.3.0 py_0 conda-forge
ca-certificates 2019.11.28 hecc5488_0 conda-forge
certifi 2019.11.28 py38_0 conda-forge
cffi 1.13.2 py38h33e799b_0 conda-forge
chardet 3.0.4 py38_1003 conda-forge
cryptography 2.8 py38hafa8578_1 conda-forge
freezegun 0.3.12 py_0 conda-forge
idna 2.8 py38_1000 conda-forge
importlib_metadata 1.3.0 py38_0 conda-forge
libcxx 9.0.0 h89e68fa_1 conda-forge
libffi 3.2.1 h6de7cb9_1006 conda-forge
ligo-common 1.0.4 py_0 conda-forge
more-itertools 8.0.2 py_0 conda-forge
ncurses 6.1 h0a44026_1002 conda-forge
openssl 1.1.1d h0b31af3_0 conda-forge
packaging 19.2 py_0 conda-forge
pip 19.3.1 py38_0 conda-forge
pluggy 0.13.1 py38_0 conda-forge
py 1.8.1 py_0 conda-forge
pycparser 2.19 py38_1 conda-forge
pyopenssl 19.1.0 py38_0 conda-forge
pyparsing 2.4.6 py_0 conda-forge
pysocks 1.7.1 py38_0 conda-forge
pytest 5.3.2 py38_0 conda-forge
pytest-freezegun 0.3.0.post1 py_0 conda-forge
pytest-httpserver 0.3.4 py_0 conda-forge
pytest-socket 0.3.3 py_0 conda-forge
python 3.8.0 hd366da7_5 conda-forge
python-dateutil 2.8.1 py_0 conda-forge
readline 8.0 hcfe32e1_0 conda-forge
requests 2.22.0 py38_1 conda-forge
safe-netrc 0.0.1 py_1001 conda-forge
setuptools 44.0.0 py38_0 conda-forge
six 1.13.0 py38_0 conda-forge
sqlite 3.30.1 h93121df_0 conda-forge
tk 8.6.10 hbbe82c9_0 conda-forge
urllib3 1.25.7 py38_0 conda-forge
wcwidth 0.1.7 py_1 conda-forge
werkzeug 0.16.0 py_0 conda-forge
wheel 0.33.6 py38_0 conda-forge
xz 5.2.4 h1de35cc_1001 conda-forge
zipp 0.6.0 py_0 conda-forge
zlib 1.2.11 h0b31af3_1006 conda-forge