import os.path
import logging
import socket
from base64 import b64encode

from urllib3 import PoolManager, ProxyManager, proxy_from_url, Timeout
from urllib3.util.retry import Retry
from urllib3.util.ssl_ import (
    ssl, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION, DEFAULT_CIPHERS,
)
from urllib3.exceptions import SSLError as URLLib3SSLError
from urllib3.exceptions import ReadTimeoutError as URLLib3ReadTimeoutError
from urllib3.exceptions import ConnectTimeoutError as URLLib3ConnectTimeoutError
from urllib3.exceptions import NewConnectionError, ProtocolError, ProxyError
try:
    # Always import the original SSLContext, even if it has been patched
    from urllib3.contrib.pyopenssl import orig_util_SSLContext as SSLContext
except ImportError:
    from urllib3.util.ssl_ import SSLContext

import botocore.awsrequest
from botocore.vendored import six
from botocore.vendored.six.moves.urllib_parse import unquote
from botocore.compat import filter_ssl_warnings, urlparse
from botocore.exceptions import (
    ConnectionClosedError, EndpointConnectionError, HTTPClientError,
    ReadTimeoutError, ProxyConnectionError, ConnectTimeoutError, SSLError
)

filter_ssl_warnings()
logger = logging.getLogger(__name__)
DEFAULT_TIMEOUT = 60
MAX_POOL_CONNECTIONS = 10
DEFAULT_CA_BUNDLE = os.path.join(os.path.dirname(__file__), 'cacert.pem')

try:
    from certifi import where
except ImportError:
    def where():
        return DEFAULT_CA_BUNDLE


def get_cert_path(verify):
    if verify is not True:
        return verify

    return where()


def create_urllib3_context(ssl_version=None, cert_reqs=None,
                           options=None, ciphers=None):
    """ This function is a vendored version of the same function in urllib3

        We vendor this function to ensure that the SSL contexts we construct
        always use the std lib SSLContext instead of pyopenssl.
    """
    context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)

    # Setting the default here, as we may have no ssl module on import
    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs

    if options is None:
        options = 0
        # SSLv2 is easily broken and is considered harmful and dangerous
        options |= OP_NO_SSLv2
        # SSLv3 has several problems and is now dangerous
        options |= OP_NO_SSLv3
        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
        # (issue urllib3#309)
        options |= OP_NO_COMPRESSION

    context.options |= options

    if getattr(context, 'supports_set_ciphers', True):
        # Platform-specific: Python 2.6
        context.set_ciphers(ciphers or DEFAULT_CIPHERS)

    context.verify_mode = cert_reqs
    if getattr(context, 'check_hostname', None) is not None:
        # Platform-specific: Python 3.2
        # We do our own verification, including fingerprints and alternative
        # hostnames. So disable it here
        context.check_hostname = False
    return context
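
# A minimal usage sketch, assuming only this module's defaults; the names
# 'ctx' and 'pool' are illustrative. _get_ssl_context() below wires the
# returned context into urllib3 the same way, via the 'ssl_context' kwarg:
#
#   ctx = create_urllib3_context()
#   pool = PoolManager(ssl_context=ctx, maxsize=MAX_POOL_CONNECTIONS)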


class ProxyConfiguration(object):
    """Represents a proxy configuration dictionary.

    This class represents a proxy configuration dictionary and provides
    utility functions to retrieve well-structured proxy urls and proxy
    headers from the proxy configuration dictionary.
    """
    def __init__(self, proxies=None):
        if proxies is None:
            proxies = {}
        self._proxies = proxies

    def proxy_url_for(self, url):
        """Retrieves the corresponding proxy url for a given url. """
        parsed_url = urlparse(url)
        proxy = self._proxies.get(parsed_url.scheme)
        if proxy:
            proxy = self._fix_proxy_url(proxy)
        return proxy

    def proxy_headers_for(self, proxy_url):
        """Retrieves the corresponding proxy headers for a given proxy url. """
        headers = {}
        username, password = self._get_auth_from_url(proxy_url)
        if username and password:
            basic_auth = self._construct_basic_auth(username, password)
            headers['Proxy-Authorization'] = basic_auth
        return headers

    def _fix_proxy_url(self, proxy_url):
        if proxy_url.startswith('http:') or proxy_url.startswith('https:'):
            return proxy_url
        elif proxy_url.startswith('//'):
            return 'http:' + proxy_url
        else:
            return 'http://' + proxy_url

    def _construct_basic_auth(self, username, password):
        auth_str = '{0}:{1}'.format(username, password)
        encoded_str = b64encode(auth_str.encode('ascii')).strip().decode()
        return 'Basic {0}'.format(encoded_str)

    def _get_auth_from_url(self, url):
        parsed_url = urlparse(url)
        try:
            return unquote(parsed_url.username), unquote(parsed_url.password)
        except (AttributeError, TypeError):
            return None, None
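
# A minimal usage sketch of ProxyConfiguration; the proxy address and
# credentials below are illustrative:
#
#   config = ProxyConfiguration(proxies={'https': 'user:pass@10.0.0.1:3128'})
#   config.proxy_url_for('https://s3.amazonaws.com')
#   # -> 'http://user:pass@10.0.0.1:3128'
#   config.proxy_headers_for('http://user:pass@10.0.0.1:3128')
#   # -> {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}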


class URLLib3Session(object):
    """A basic HTTP client that supports connection pooling and proxies.

    This class is inspired by requests.adapters.HTTPAdapter, but has been
    boiled down to meet the use cases needed by botocore. For the most part
    this class matches the functionality of HTTPAdapter in requests v2.7.0
    (the same as our vendored version). The only major difference of note is
    that we currently do not support sending chunked requests. While requests
    v2.7.0 implemented this itself, later versions of urllib3 support this
    directly via a flag to urlopen, so enabling it if needed should be trivial.
    """
    def __init__(self,
                 verify=True,
                 proxies=None,
                 timeout=None,
                 max_pool_connections=MAX_POOL_CONNECTIONS,
                 socket_options=None,
                 client_cert=None,
    ):
        self._verify = verify
        self._proxy_config = ProxyConfiguration(proxies=proxies)
        self._pool_classes_by_scheme = {
            'http': botocore.awsrequest.AWSHTTPConnectionPool,
            'https': botocore.awsrequest.AWSHTTPSConnectionPool,
        }
        if timeout is None:
            timeout = DEFAULT_TIMEOUT
        if not isinstance(timeout, (int, float)):
            timeout = Timeout(connect=timeout[0], read=timeout[1])

        self._cert_file = None
        self._key_file = None
        if isinstance(client_cert, str):
            self._cert_file = client_cert
        elif isinstance(client_cert, tuple):
            self._cert_file, self._key_file = client_cert

        self._timeout = timeout
        self._max_pool_connections = max_pool_connections
        self._socket_options = socket_options
        if socket_options is None:
            self._socket_options = []
        self._proxy_managers = {}
        self._manager = PoolManager(**self._get_pool_manager_kwargs())
        self._manager.pool_classes_by_scheme = self._pool_classes_by_scheme

    def _get_pool_manager_kwargs(self, **extra_kwargs):
        pool_manager_kwargs = {
            'strict': True,
            'timeout': self._timeout,
            'maxsize': self._max_pool_connections,
            'ssl_context': self._get_ssl_context(),
            'socket_options': self._socket_options,
            'cert_file': self._cert_file,
            'key_file': self._key_file,
        }
        pool_manager_kwargs.update(**extra_kwargs)
        return pool_manager_kwargs

    def _get_ssl_context(self):
        return create_urllib3_context()

    def _get_proxy_manager(self, proxy_url):
        if proxy_url not in self._proxy_managers:
            proxy_headers = self._proxy_config.proxy_headers_for(proxy_url)
            proxy_manager_kwargs = self._get_pool_manager_kwargs(
                proxy_headers=proxy_headers)
            proxy_manager = proxy_from_url(proxy_url, **proxy_manager_kwargs)
            proxy_manager.pool_classes_by_scheme = self._pool_classes_by_scheme
            self._proxy_managers[proxy_url] = proxy_manager

        return self._proxy_managers[proxy_url]

    def _path_url(self, url):
        parsed_url = urlparse(url)
        path = parsed_url.path
        if not path:
            path = '/'
        if parsed_url.query:
            path = path + '?' + parsed_url.query
        return path

    def _setup_ssl_cert(self, conn, url, verify):
        if url.lower().startswith('https') and verify:
            conn.cert_reqs = 'CERT_REQUIRED'
            conn.ca_certs = get_cert_path(verify)
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None

    def _get_connection_manager(self, url, proxy_url=None):
        if proxy_url:
            manager = self._get_proxy_manager(proxy_url)
        else:
            manager = self._manager
        return manager

    def _get_request_target(self, url, proxy_url):
        if proxy_url and url.startswith('http:'):
            # HTTP proxies expect the request_target to be the absolute url to
            # know which host to establish a connection to
            return url
        else:
            # otherwise just set the request target to the url path
            return self._path_url(url)
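        # For illustration (URL made up): with a proxy configured and
        # url='http://example.com/a?b=1' the request target remains the full
        # 'http://example.com/a?b=1'; without a proxy it becomes '/a?b=1'.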

    def _chunked(self, headers):
        return headers.get('Transfer-Encoding', '') == 'chunked'

    def send(self, request):
        try:
            proxy_url = self._proxy_config.proxy_url_for(request.url)
            manager = self._get_connection_manager(request.url, proxy_url)
            conn = manager.connection_from_url(request.url)
            self._setup_ssl_cert(conn, request.url, self._verify)

            request_target = self._get_request_target(request.url, proxy_url)
            urllib_response = conn.urlopen(
                method=request.method,
                url=request_target,
                body=request.body,
                headers=request.headers,
                retries=Retry(False),
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                chunked=self._chunked(request.headers),
            )

            http_response = botocore.awsrequest.AWSResponse(
                request.url,
                urllib_response.status,
                urllib_response.headers,
                urllib_response,
            )

            if not request.stream_output:
                # Cause the raw stream to be exhausted immediately. We do it
                # this way instead of using preload_content because
                # preload_content will never buffer chunked responses
                http_response.content

            return http_response
        except URLLib3SSLError as e:
            raise SSLError(endpoint_url=request.url, error=e)
        except (NewConnectionError, socket.gaierror) as e:
            raise EndpointConnectionError(endpoint_url=request.url, error=e)
        except ProxyError as e:
            raise ProxyConnectionError(proxy_url=proxy_url, error=e)
        except URLLib3ConnectTimeoutError as e:
            raise ConnectTimeoutError(endpoint_url=request.url, error=e)
        except URLLib3ReadTimeoutError as e:
            raise ReadTimeoutError(endpoint_url=request.url, error=e)
        except ProtocolError as e:
            raise ConnectionClosedError(
                error=e,
                request=request,
                endpoint_url=request.url
            )
        except Exception as e:
            message = 'Exception received when sending urllib3 HTTP request'
            logger.debug(message, exc_info=True)
            raise HTTPClientError(error=e)
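
# A minimal usage sketch of URLLib3Session, assuming a prepared
# botocore.awsrequest.AWSRequest and a reachable endpoint; the URL and
# timeouts below are illustrative:
#
#   from botocore.awsrequest import AWSRequest
#
#   session = URLLib3Session(timeout=(5, 60))
#   request = AWSRequest(method='GET', url='https://example.com/').prepare()
#   response = session.send(request)
#   status, body = response.status_code, response.content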
