Mirror of https://github.com/wakatime/sublime-wakatime.git (synced 2023-08-10 21:13:02 +03:00)

commit f4b6f4c4ea (parent 5969830ef6)

    using wakatime package v0.4.2

This bumps the bundled wakatime package from v0.4.1 to v0.4.2; per the changelog entry below, the bulk of the diff replaces the previously bundled newer copy of requests (and its vendored urllib3) with the requests v1.2.3 release from PyPI.
@@ -2,6 +2,13 @@
 History
 -------
 
+
+0.4.2 (2013-09-04)
+++++++++++++++++++
+
+- Using requests package v1.2.3 from pypi
+
+
 0.4.1 (2013-08-25)
 ++++++++++++++++++
 
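A quick way to confirm which requests release ended up bundled after this bump — a minimal sketch, assuming the plugin vendors requests under wakatime/packages/requests (an assumption about the layout; the diff itself does not show the vendoring path):

    # Hypothetical check; the wakatime.packages.requests path is an assumption,
    # not something stated in this commit.
    from wakatime.packages import requests

    print(requests.__version__)  # expected to print '1.2.3' after this commit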
@@ -12,7 +12,7 @@
 from __future__ import print_function
 
 __title__ = 'wakatime'
-__version__ = '0.4.1'
+__version__ = '0.4.2'
 __author__ = 'Alan Hamlett'
 __license__ = 'BSD'
 __copyright__ = 'Copyright 2013 Alan Hamlett'
@@ -118,7 +118,7 @@ class HTTPAdapter(BaseAdapter):
         :param verify: Whether we should actually verify the certificate.
         :param cert: The SSL certificate to verify.
         """
-        if url.lower().startswith('https') and verify:
+        if url.startswith('https') and verify:
 
             cert_loc = None
 
@@ -190,13 +190,13 @@ class HTTPAdapter(BaseAdapter):
         :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
         """
         proxies = proxies or {}
-        proxy = proxies.get(urlparse(url.lower()).scheme)
+        proxy = proxies.get(urlparse(url).scheme)
 
         if proxy:
-            proxy = prepend_scheme_if_needed(proxy, urlparse(url.lower()).scheme)
+            proxy = prepend_scheme_if_needed(proxy, urlparse(url).scheme)
             conn = ProxyManager(self.poolmanager.connection_from_url(proxy))
         else:
-            conn = self.poolmanager.connection_from_url(url.lower())
+            conn = self.poolmanager.connection_from_url(url)
 
         return conn
 
@@ -214,7 +214,7 @@ class HTTPAdapter(BaseAdapter):
         If the message is being sent through a proxy, the full URL has to be
         used. Otherwise, we should only use the path portion of the URL.
 
-        This should not be called from user code, and is only exposed for use
+        This shoudl not be called from user code, and is only exposed for use
         when subclassing the
         :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
 
@@ -83,14 +83,13 @@ except ImportError:
 # ---------
 
 if is_py2:
-    from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
+    from urllib import quote, unquote, quote_plus, unquote_plus, urlencode
     from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
     from urllib2 import parse_http_list
     import cookielib
     from Cookie import Morsel
     from StringIO import StringIO
     from .packages.urllib3.packages.ordered_dict import OrderedDict
-    from httplib import IncompleteRead
 
     builtin_str = str
     bytes = str
@@ -101,12 +100,11 @@ if is_py2:
 
 elif is_py3:
     from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
-    from urllib.request import parse_http_list, getproxies, proxy_bypass
+    from urllib.request import parse_http_list
     from http import cookiejar as cookielib
     from http.cookies import Morsel
     from io import StringIO
     from collections import OrderedDict
-    from http.client import IncompleteRead
 
     builtin_str = str
     str = str
@@ -6,7 +6,6 @@ Compatibility code to be able to use `cookielib.CookieJar` with requests.
 requests.utils imports from here, so be careful with imports.
 """
 
-import time
 import collections
 from .compat import cookielib, urlparse, Morsel
 
@@ -74,10 +73,6 @@ class MockRequest(object):
     def origin_req_host(self):
         return self.get_origin_req_host()
 
-    @property
-    def host(self):
-        return self.get_host()
-
 
 class MockResponse(object):
     """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
@@ -263,11 +258,6 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
        """Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
        remove_cookie_by_name(self, name)
 
-    def set_cookie(self, cookie, *args, **kwargs):
-        if cookie.value.startswith('"') and cookie.value.endswith('"'):
-            cookie.value = cookie.value.replace('\\"', '')
-        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
-
     def update(self, other):
         """Updates this jar with cookies from another CookieJar or dict-like"""
         if isinstance(other, cookielib.CookieJar):
@@ -364,23 +354,19 @@ def create_cookie(name, value, **kwargs):
 
 def morsel_to_cookie(morsel):
     """Convert a Morsel object into a Cookie containing the one k/v pair."""
-    expires = None
-    if morsel["max-age"]:
-        expires = time.time() + morsel["max-age"]
-    elif morsel['expires']:
-        expires = morsel['expires']
-        if type(expires) == type(""):
-            time_template = "%a, %d-%b-%Y %H:%M:%S GMT"
-            expires = time.mktime(time.strptime(expires, time_template))
     c = create_cookie(
         name=morsel.key,
         value=morsel.value,
         version=morsel['version'] or 0,
         port=None,
+        port_specified=False,
         domain=morsel['domain'],
+        domain_specified=bool(morsel['domain']),
+        domain_initial_dot=morsel['domain'].startswith('.'),
         path=morsel['path'],
+        path_specified=bool(morsel['path']),
         secure=bool(morsel['secure']),
-        expires=expires,
+        expires=morsel['max-age'] or morsel['expires'],
         discard=False,
         comment=morsel['comment'],
         comment_url=bool(morsel['comment']),
@@ -53,7 +53,3 @@ class InvalidSchema(RequestException, ValueError):
 
 class InvalidURL(RequestException, ValueError):
     """ The URL provided was somehow invalid. """
-
-
-class ChunkedEncodingError(RequestException):
-    """The server declared chunked encoding but sent an invalid chunk."""
@@ -19,16 +19,14 @@ from .auth import HTTPBasicAuth
 from .cookies import cookiejar_from_dict, get_cookie_header
 from .packages.urllib3.filepost import encode_multipart_formdata
 from .packages.urllib3.util import parse_url
-from .exceptions import (
-    HTTPError, RequestException, MissingSchema, InvalidURL,
-    ChunkedEncodingError)
+from .exceptions import HTTPError, RequestException, MissingSchema, InvalidURL
 from .utils import (
     guess_filename, get_auth_from_url, requote_uri,
     stream_decode_response_unicode, to_key_val_list, parse_header_links,
     iter_slices, guess_json_utf, super_len)
 from .compat import (
-    cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
-    is_py2, chardet, json, builtin_str, basestring, IncompleteRead)
+    cookielib, urlparse, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
+    is_py2, chardet, json, builtin_str, basestring)
 
 CONTENT_CHUNK_SIZE = 10 * 1024
 ITER_CHUNK_SIZE = 512
@@ -211,6 +209,7 @@ class Request(RequestHooksMixin):
         self.params = params
         self.auth = auth
         self.cookies = cookies
+        self.hooks = hooks
 
     def __repr__(self):
         return '<Request [%s]>' % (self.method)
@@ -218,17 +217,19 @@ class Request(RequestHooksMixin):
     def prepare(self):
         """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
         p = PreparedRequest()
-        p.prepare(
-            method=self.method,
-            url=self.url,
-            headers=self.headers,
-            files=self.files,
-            data=self.data,
-            params=self.params,
-            auth=self.auth,
-            cookies=self.cookies,
-            hooks=self.hooks,
-        )
+
+        p.prepare_method(self.method)
+        p.prepare_url(self.url, self.params)
+        p.prepare_headers(self.headers)
+        p.prepare_cookies(self.cookies)
+        p.prepare_body(self.data, self.files)
+        p.prepare_auth(self.auth, self.url)
+        # Note that prepare_auth must be last to enable authentication schemes
+        # such as OAuth to work on a fully prepared request.
+
+        # This MUST go after prepare_auth. Authenticators could add a hook
+        p.prepare_hooks(self.hooks)
 
         return p
 
@@ -263,34 +264,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         #: dictionary of callback hooks, for internal usage.
         self.hooks = default_hooks()
 
-    def prepare(self, method=None, url=None, headers=None, files=None,
-            data=None, params=None, auth=None, cookies=None, hooks=None):
-        """Prepares the the entire request with the given parameters."""
-
-        self.prepare_method(method)
-        self.prepare_url(url, params)
-        self.prepare_headers(headers)
-        self.prepare_cookies(cookies)
-        self.prepare_body(data, files)
-        self.prepare_auth(auth, url)
-        # Note that prepare_auth must be last to enable authentication schemes
-        # such as OAuth to work on a fully prepared request.
-
-        # This MUST go after prepare_auth. Authenticators could add a hook
-        self.prepare_hooks(hooks)
-
     def __repr__(self):
         return '<PreparedRequest [%s]>' % (self.method)
 
-    def copy(self):
-        p = PreparedRequest()
-        p.method = self.method
-        p.url = self.url
-        p.headers = self.headers
-        p.body = self.body
-        p.hooks = self.hooks
-        return p
-
     def prepare_method(self, method):
         """Prepares the given HTTP method."""
         self.method = method
@@ -376,6 +352,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         body = None
         content_type = None
         length = None
+        is_stream = False
 
         is_stream = all([
             hasattr(data, '__iter__'),
@@ -387,7 +364,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             try:
                 length = super_len(data)
             except (TypeError, AttributeError):
-                length = None
+                length = False
 
             if is_stream:
                 body = data
@@ -395,10 +372,13 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             if files:
                 raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
 
-            if length is not None:
+            if length:
                 self.headers['Content-Length'] = str(length)
             else:
                 self.headers['Transfer-Encoding'] = 'chunked'
+        # Check if file, fo, generator, iterator.
+        # If not, run through normal process.
+
         else:
             # Multi-part file uploads.
             if files:
@@ -557,22 +537,11 @@ class Response(object):
             return iter_slices(self._content, chunk_size)
 
         def generate():
-            try:
-                # Special case for urllib3.
-                try:
-                    for chunk in self.raw.stream(chunk_size,
-                                                 decode_content=True):
-                        yield chunk
-                except IncompleteRead as e:
-                    raise ChunkedEncodingError(e)
-            except AttributeError:
-                # Standard file-like object.
-                while 1:
-                    chunk = self.raw.read(chunk_size)
-                    if not chunk:
-                        break
-                    yield chunk
-
+            while 1:
+                chunk = self.raw.read(chunk_size, decode_content=True)
+                if not chunk:
+                    break
+                yield chunk
             self._content_consumed = True
 
         gen = generate()
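Callers of iter_content() are unaffected by the generate() rewrite above; only the failure mode differs (the removed code raised ChunkedEncodingError on a truncated chunked body, while the v1.2.3 loop simply stops yielding). A minimal consumption sketch, assuming the vendored copy behaves like the PyPI release:

    import requests  # stand-in for the vendored copy

    # Stream a response; iter_content() pulls chunks through generate() above.
    resp = requests.get('http://example.com/big-file', stream=True)
    with open('big-file', 'wb') as fh:
        for chunk in resp.iter_content(chunk_size=8192):
            if chunk:  # skip keep-alive chunks
                fh.write(chunk)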
@@ -714,9 +683,4 @@ class Response(object):
         raise HTTPError(http_error_msg, response=self)
 
     def close(self):
-        """Closes the underlying file descriptor and releases the connection
-        back to the pool.
-
-        *Note: Should not normally need to be called explicitly.*
-        """
         return self.raw.release_conn()
@@ -5,7 +5,7 @@
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 from collections import MutableMapping
-from threading import RLock
+from threading import Lock
 
 try: # Python 2.7+
     from collections import OrderedDict
@@ -40,18 +40,18 @@ class RecentlyUsedContainer(MutableMapping):
         self.dispose_func = dispose_func
 
         self._container = self.ContainerCls()
-        self.lock = RLock()
+        self._lock = Lock()
 
     def __getitem__(self, key):
         # Re-insert the item, moving it to the end of the eviction line.
-        with self.lock:
+        with self._lock:
             item = self._container.pop(key)
             self._container[key] = item
             return item
 
     def __setitem__(self, key, value):
         evicted_value = _Null
-        with self.lock:
+        with self._lock:
             # Possibly evict the existing value of 'key'
             evicted_value = self._container.get(key, _Null)
             self._container[key] = value
@@ -65,21 +65,21 @@ class RecentlyUsedContainer(MutableMapping):
             self.dispose_func(evicted_value)
 
     def __delitem__(self, key):
-        with self.lock:
+        with self._lock:
             value = self._container.pop(key)
 
         if self.dispose_func:
             self.dispose_func(value)
 
     def __len__(self):
-        with self.lock:
+        with self._lock:
             return len(self._container)
 
     def __iter__(self):
         raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')
 
     def clear(self):
-        with self.lock:
+        with self._lock:
             # Copy pointers to all values, then wipe the mapping
             # under Python 2, this copies the list of values twice :-|
             values = list(self._container.values())
@@ -90,5 +90,5 @@ class RecentlyUsedContainer(MutableMapping):
                 self.dispose_func(value)
 
     def keys(self):
-        with self.lock:
+        with self._lock:
             return self._container.keys()
@@ -26,10 +26,7 @@ except ImportError:
 
 try: # Compiled with SSL?
     HTTPSConnection = object
-
-    class BaseSSLError(BaseException):
-        pass
-
+    BaseSSLError = None
     ssl = None
 
 try: # Python 3
@@ -113,7 +110,7 @@ class VerifiedHTTPSConnection(HTTPSConnection):
         if self.assert_fingerprint:
             assert_fingerprint(self.sock.getpeercert(binary_form=True),
                                self.assert_fingerprint)
-        elif self.assert_hostname is not False:
+        else:
             match_hostname(self.sock.getpeercert(),
                            self.assert_hostname or self.host)
 
@@ -155,8 +152,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
     :class:`httplib.HTTPConnection`.
 
     :param timeout:
-        Socket timeout in seconds for each individual connection, can be
-        a float. None disables timeout.
+        Socket timeout for each individual connection, can be a float. None
+        disables timeout.
 
     :param maxsize:
         Number of connections to save that can be reused. More than 1 is useful
@@ -379,7 +376,6 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 
     :param timeout:
         If specified, overrides the default timeout for this one request.
-        It may be a float (in seconds).
 
     :param pool_timeout:
         If set and the pool is set to block=True, then this method will
@@ -414,6 +410,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 
         # Check host
         if assert_same_host and not self.is_same_host(url):
+            host = "%s://%s" % (self.scheme, self.host)
+            if self.port:
+                host = "%s:%d" % (host, self.port)
+
             raise HostChangedError(self, url, retries - 1)
 
         conn = None
@@ -513,7 +513,6 @@ class HTTPSConnectionPool(HTTPConnectionPool):
 
     :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
     ``assert_hostname`` and ``host`` in this order to verify connections.
-    If ``assert_hostname`` is False, no verification is done.
 
     The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs`` and
     ``ssl_version`` are only used if :mod:`ssl` is available and are fed into
@@ -33,7 +33,7 @@ class NTLMConnectionPool(HTTPSConnectionPool):
     def __init__(self, user, pw, authurl, *args, **kwargs):
         """
         authurl is a random URL on the server that is protected by NTLM.
-        user is the Windows user, probably in the DOMAIN\\username format.
+        user is the Windows user, probably in the DOMAIN\username format.
         pw is the password for the user.
         """
         super(NTLMConnectionPool, self).__init__(*args, **kwargs)
@@ -106,9 +106,6 @@ class WrappedSocket(object):
         self.connection = connection
         self.socket = socket
 
-    def fileno(self):
-        return self.socket.fileno()
-
     def makefile(self, mode, bufsize=-1):
         return _fileobject(self.connection, mode, bufsize)
 
@@ -118,9 +115,6 @@ class WrappedSocket(object):
     def sendall(self, data):
         return self.connection.sendall(data)
 
-    def close(self):
-        return self.connection.shutdown()
-
     def getpeercert(self, binary_form=False):
         x509 = self.connection.get_peer_certificate()
         if not x509:
@@ -1,5 +1,5 @@
 # urllib3/filepost.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
 #
 # This module is part of urllib3 and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -6,11 +6,6 @@
 
 import logging
 
-try: # Python 3
-    from urllib.parse import urljoin
-except ImportError:
-    from urlparse import urljoin
-
 from ._collections import RecentlyUsedContainer
 from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
 from .connectionpool import connection_from_url, port_by_scheme
@@ -104,16 +99,15 @@ class PoolManager(RequestMethods):
 
         pool_key = (scheme, host, port)
 
-        with self.pools.lock:
-            # If the scheme, host, or port doesn't match existing open connections,
-            # open a new ConnectionPool.
-            pool = self.pools.get(pool_key)
-            if pool:
-                return pool
+        # If the scheme, host, or port doesn't match existing open connections,
+        # open a new ConnectionPool.
+        pool = self.pools.get(pool_key)
+        if pool:
+            return pool
 
         # Make a fresh ConnectionPool of the desired type
         pool = self._new_pool(scheme, host, port)
         self.pools[pool_key] = pool
         return pool
 
     def connection_from_url(self, url):
@@ -151,10 +145,6 @@ class PoolManager(RequestMethods):
         if not redirect_location:
             return response
 
-        # Support relative URLs for redirecting.
-        redirect_location = urljoin(url, redirect_location)
-
-        # RFC 2616, Section 10.3.4
         if response.status == 303:
             method = 'GET'
 
@@ -181,9 +171,9 @@ class ProxyManager(RequestMethods):
         """
         headers_ = {'Accept': '*/*'}
 
-        netloc = parse_url(url).netloc
-        if netloc:
-            headers_['Host'] = netloc
+        host = parse_url(url).host
+        if host:
+            headers_['Host'] = host
 
         if headers:
             headers_.update(headers)
@@ -30,7 +30,7 @@ class RequestMethods(object):
     in the URL (such as GET, HEAD, DELETE).
 
     :meth:`.request_encode_body` is for sending requests whose fields are
-    encoded in the *body* of the request using multipart or www-form-urlencoded
+    encoded in the *body* of the request using multipart or www-orm-urlencoded
     (such as for POST, PUT, PATCH).
 
     :meth:`.request` is for making any kind of request, it will look up the
@@ -1,5 +1,5 @@
 # urllib3/response.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
 #
 # This module is part of urllib3 and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,11 +7,9 @@
 
 import logging
 import zlib
-import io
 
 from .exceptions import DecodeError
 from .packages.six import string_types as basestring, binary_type
-from .util import is_fp_closed
 
 
 log = logging.getLogger(__name__)
@@ -50,7 +48,7 @@ def _get_decoder(mode):
     return DeflateDecoder()
 
 
-class HTTPResponse(io.IOBase):
+class HTTPResponse(object):
     """
     HTTP Response container.
 
@@ -185,11 +183,9 @@ class HTTPResponse(io.IOBase):
         try:
             if decode_content and self._decoder:
                 data = self._decoder.decompress(data)
-        except (IOError, zlib.error) as e:
-            raise DecodeError(
-                "Received response with content-encoding: %s, but "
-                "failed to decode it." % content_encoding,
-                e)
+        except (IOError, zlib.error):
+            raise DecodeError("Received response with content-encoding: %s, but "
+                              "failed to decode it." % content_encoding)
 
         if flush_decoder and self._decoder:
             buf = self._decoder.decompress(binary_type())
@@ -204,29 +200,6 @@ class HTTPResponse(io.IOBase):
         if self._original_response and self._original_response.isclosed():
             self.release_conn()
 
-    def stream(self, amt=2**16, decode_content=None):
-        """
-        A generator wrapper for the read() method. A call will block until
-        ``amt`` bytes have been read from the connection or until the
-        connection is closed.
-
-        :param amt:
-            How much of the content to read. The generator will return up to
-            much data per iteration, but may return less. This is particularly
-            likely when using compressed data. However, the empty string will
-            never be returned.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-        """
-        while not is_fp_closed(self._fp):
-            data = self.read(amt=amt, decode_content=decode_content)
-
-            if data:
-                yield data
-
-
     @classmethod
     def from_httplib(ResponseCls, r, **response_kw):
         """
@@ -266,35 +239,3 @@ class HTTPResponse(io.IOBase):
 
     def getheader(self, name, default=None):
         return self.headers.get(name, default)
-
-    # Overrides from io.IOBase
-    def close(self):
-        if not self.closed:
-            self._fp.close()
-
-    @property
-    def closed(self):
-        if self._fp is None:
-            return True
-        elif hasattr(self._fp, 'closed'):
-            return self._fp.closed
-        elif hasattr(self._fp, 'isclosed'):  # Python 2
-            return self._fp.isclosed()
-        else:
-            return True
-
-    def fileno(self):
-        if self._fp is None:
-            raise IOError("HTTPResponse has no file to get a fileno from")
-        elif hasattr(self._fp, "fileno"):
-            return self._fp.fileno()
-        else:
-            raise IOError("The file-like object this HTTPResponse is wrapped "
-                          "around has no file descriptor")
-
-    def flush(self):
-        if self._fp is not None and hasattr(self._fp, 'flush'):
-            return self._fp.flush()
-
-    def readable(self):
-        return True
@@ -31,6 +31,7 @@ try: # Test for SSL features
 except ImportError:
     pass
 
+
 from .packages import six
 from .exceptions import LocationParseError, SSLError
 
|
|||||||
|
|
||||||
return uri
|
return uri
|
||||||
|
|
||||||
@property
|
|
||||||
def netloc(self):
|
|
||||||
"""Network location including host and port"""
|
|
||||||
if self.port:
|
|
||||||
return '%s:%d' % (self.host, self.port)
|
|
||||||
return self.host
|
|
||||||
|
|
||||||
|
|
||||||
def split_first(s, delims):
|
def split_first(s, delims):
|
||||||
"""
|
"""
|
||||||
@@ -120,7 +114,7 @@ def parse_url(url):
 
     # While this code has overlap with stdlib's urlparse, it is much
     # simplified for our needs and less annoying.
-    # Additionally, this implementations does silly things to be optimal
+    # Additionally, this imeplementations does silly things to be optimal
     # on CPython.
 
     scheme = None
@@ -149,8 +143,7 @@ def parse_url(url):
 
     # IPv6
     if url and url[0] == '[':
-        host, url = url.split(']', 1)
-        host += ']'
+        host, url = url[1:].split(']', 1)
 
     # Port
     if ':' in url:
|
|||||||
.format(hexlify(fingerprint_bytes),
|
.format(hexlify(fingerprint_bytes),
|
||||||
hexlify(cert_digest)))
|
hexlify(cert_digest)))
|
||||||
|
|
||||||
def is_fp_closed(obj):
|
|
||||||
"""
|
|
||||||
Checks whether a given file-like object is closed.
|
|
||||||
|
|
||||||
:param obj:
|
|
||||||
The file-like object to check.
|
|
||||||
"""
|
|
||||||
if hasattr(obj, 'fp'):
|
|
||||||
# Object is a container for another file-like object that gets released
|
|
||||||
# on exhaustion (e.g. HTTPResponse)
|
|
||||||
return obj.fp is None
|
|
||||||
|
|
||||||
return obj.closed
|
|
||||||
|
|
||||||
|
|
||||||
if SSLContext is not None: # Python 3.2+
|
if SSLContext is not None: # Python 3.2+
|
||||||
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
||||||
|
@@ -71,13 +71,15 @@ class SessionRedirectMixin(object):
         """Receives a Response. Returns a generator of Responses."""
 
         i = 0
+        prepared_request = PreparedRequest()
+        prepared_request.body = req.body
+        prepared_request.headers = req.headers.copy()
+        prepared_request.hooks = req.hooks
+        prepared_request.method = req.method
+        prepared_request.url = req.url
 
         # ((resp.status_code is codes.see_other))
         while (('location' in resp.headers and resp.status_code in REDIRECT_STATI)):
-            prepared_request = PreparedRequest()
-            prepared_request.body = req.body
-            prepared_request.headers = req.headers.copy()
-            prepared_request.hooks = req.hooks
 
             resp.content # Consume socket so it can be released
 
@@ -88,18 +90,13 @@ class SessionRedirectMixin(object):
             resp.close()
 
             url = resp.headers['location']
-            method = req.method
+            method = prepared_request.method
 
             # Handle redirection without scheme (see: RFC 1808 Section 4)
             if url.startswith('//'):
                 parsed_rurl = urlparse(resp.url)
                 url = '%s:%s' % (parsed_rurl.scheme, url)
 
-            # The scheme should be lower case...
-            if '://' in url:
-                scheme, uri = url.split('://', 1)
-                url = '%s://%s' % (scheme.lower(), uri)
-
             # Facilitate non-RFC2616-compliant 'location' headers
             # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
             # Compliant with RFC3986, we percent encode the url.
@@ -112,12 +109,12 @@ class SessionRedirectMixin(object):
 
             # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
             if (resp.status_code == codes.see_other and
-                    method != 'HEAD'):
+                    prepared_request.method != 'HEAD'):
                 method = 'GET'
 
             # Do what the browsers do, despite standards...
             if (resp.status_code in (codes.moved, codes.found) and
-                    method not in ('GET', 'HEAD')):
+                    prepared_request.method not in ('GET', 'HEAD')):
                 method = 'GET'
 
             prepared_request.method = method
@@ -211,10 +208,7 @@ class Session(SessionRedirectMixin):
         #: Should we trust the environment?
         self.trust_env = True
 
-        #: A CookieJar containing all currently outstanding cookies set on this
-        #: session. By default it is a
-        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
-        #: may be any other ``cookielib.CookieJar`` compatible object.
+        # Set up a CookieJar to be used by default
         self.cookies = cookiejar_from_dict({})
 
         # Default connection adapters.
@@ -228,46 +222,6 @@ class Session(SessionRedirectMixin):
     def __exit__(self, *args):
         self.close()
 
-    def prepare_request(self, request):
-        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
-        transmission and returns it. The :class:`PreparedRequest` has settings
-        merged from the :class:`Request <Request>` instance and those of the
-        :class:`Session`.
-
-        :param request: :class:`Request` instance to prepare with this
-            session's settings.
-        """
-        cookies = request.cookies or {}
-
-        # Bootstrap CookieJar.
-        if not isinstance(cookies, cookielib.CookieJar):
-            cookies = cookiejar_from_dict(cookies)
-
-        # Merge with session cookies
-        merged_cookies = RequestsCookieJar()
-        merged_cookies.update(self.cookies)
-        merged_cookies.update(cookies)
-
-
-        # Set environment's basic authentication if not explicitly set.
-        auth = request.auth
-        if self.trust_env and not auth and not self.auth:
-            auth = get_netrc_auth(request.url)
-
-        p = PreparedRequest()
-        p.prepare(
-            method=request.method.upper(),
-            url=request.url,
-            files=request.files,
-            data=request.data,
-            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
-            params=merge_setting(request.params, self.params),
-            auth=merge_setting(auth, self.auth),
-            cookies=merged_cookies,
-            hooks=merge_setting(request.hooks, self.hooks),
-        )
-        return p
-
     def request(self, method, url,
         params=None,
         data=None,
@@ -311,22 +265,20 @@ class Session(SessionRedirectMixin):
         :param cert: (optional) if String, path to ssl client cert file (.pem).
             If Tuple, ('cert', 'key') pair.
         """
-        # Create the Request.
-        req = Request(
-            method = method.upper(),
-            url = url,
-            headers = headers,
-            files = files,
-            data = data or {},
-            params = params or {},
-            auth = auth,
-            cookies = cookies,
-            hooks = hooks,
-        )
-        prep = self.prepare_request(req)
-
+        cookies = cookies or {}
         proxies = proxies or {}
 
+        # Bootstrap CookieJar.
+        if not isinstance(cookies, cookielib.CookieJar):
+            cookies = cookiejar_from_dict(cookies)
+
+        # Merge with session cookies
+        merged_cookies = RequestsCookieJar()
+        merged_cookies.update(self.cookies)
+        merged_cookies.update(cookies)
+        cookies = merged_cookies
+
         # Gather clues from the surrounding environment.
         if self.trust_env:
             # Set environment's proxies.
@@ -334,6 +286,10 @@ class Session(SessionRedirectMixin):
             for (k, v) in env_proxies.items():
                 proxies.setdefault(k, v)
 
+            # Set environment's basic authentication.
+            if not auth:
+                auth = get_netrc_auth(url)
+
             # Look for configuration.
             if not verify and verify is not False:
                 verify = os.environ.get('REQUESTS_CA_BUNDLE')
@@ -343,11 +299,30 @@ class Session(SessionRedirectMixin):
                 verify = os.environ.get('CURL_CA_BUNDLE')
 
         # Merge all the kwargs.
+        params = merge_setting(params, self.params)
+        headers = merge_setting(headers, self.headers, dict_class=CaseInsensitiveDict)
+        auth = merge_setting(auth, self.auth)
         proxies = merge_setting(proxies, self.proxies)
+        hooks = merge_setting(hooks, self.hooks)
         stream = merge_setting(stream, self.stream)
         verify = merge_setting(verify, self.verify)
         cert = merge_setting(cert, self.cert)
 
+        # Create the Request.
+        req = Request()
+        req.method = method.upper()
+        req.url = url
+        req.headers = headers
+        req.files = files
+        req.data = data
+        req.params = params
+        req.auth = auth
+        req.cookies = cookies
+        req.hooks = hooks
+
+        # Prepare the Request.
+        prep = req.prepare()
+
         # Send the request.
         send_kwargs = {
             'stream': stream,
@@ -441,7 +416,7 @@ class Session(SessionRedirectMixin):
 
         # It's possible that users might accidentally send a Request object.
         # Guard against that specific failure case.
-        if not isinstance(request, PreparedRequest):
+        if getattr(request, 'prepare', None):
            raise ValueError('You can only send PreparedRequests.')
 
         # Set up variables needed for resolve_redirects and dispatching of
@@ -492,7 +467,7 @@ class Session(SessionRedirectMixin):
         """Returns the appropriate connnection adapter for the given URL."""
         for (prefix, adapter) in self.adapters.items():
 
-            if url.lower().startswith(prefix):
+            if url.startswith(prefix):
                 return adapter
 
         # Nothing matches :-/
@@ -18,8 +18,7 @@ _codes = {
     205: ('reset_content', 'reset'),
     206: ('partial_content', 'partial'),
     207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
-    208: ('already_reported',),
-    226: ('im_used',),
+    208: ('im_used',),
 
     # Redirection.
     300: ('multiple_choices',),
@@ -103,7 +103,7 @@ class CaseInsensitiveDict(collections.MutableMapping):
 
     # Copy is required
     def copy(self):
         return CaseInsensitiveDict(self._store.values())
 
     def __repr__(self):
         return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
@@ -22,7 +22,6 @@ from . import __version__
 from . import certs
 from .compat import parse_http_list as _parse_list_header
 from .compat import quote, urlparse, bytes, str, OrderedDict, urlunparse
-from .compat import getproxies, proxy_bypass
 from .cookies import RequestsCookieJar, cookiejar_from_dict
 from .structures import CaseInsensitiveDict
 
@@ -302,7 +301,7 @@ def stream_decode_response_unicode(iterator, r):
         rv = decoder.decode(chunk)
         if rv:
             yield rv
-    rv = decoder.decode(b'', final=True)
+    rv = decoder.decode('', final=True)
     if rv:
         yield rv
 
@@ -387,34 +386,37 @@ def requote_uri(uri):
 def get_environ_proxies(url):
     """Return a dict of environment proxies."""
 
+    proxy_keys = [
+        'all',
+        'http',
+        'https',
+        'ftp',
+        'socks'
+    ]
+
     get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
 
     # First check whether no_proxy is defined. If it is, check that the URL
     # we're getting isn't in the no_proxy list.
     no_proxy = get_proxy('no_proxy')
-    netloc = urlparse(url).netloc
 
     if no_proxy:
         # We need to check whether we match here. We need to see if we match
         # the end of the netloc, both with and without the port.
         no_proxy = no_proxy.split(',')
+        netloc = urlparse(url).netloc
 
         for host in no_proxy:
            if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
                 # The URL does match something in no_proxy, so we don't want
                 # to apply the proxies on this URL.
                 return {}
 
-    # If the system proxy settings indicate that this URL should be bypassed,
-    # don't proxy.
-    if proxy_bypass(netloc):
-        return {}
-
     # If we get here, we either didn't have no_proxy set or we're not going
-    # anywhere that no_proxy applies to, and the system settings don't require
-    # bypassing the proxy for the current URL.
-    return getproxies()
+    # anywhere that no_proxy applies to.
+    proxies = [(key, get_proxy(key + '_proxy')) for key in proxy_keys]
+    return dict([(key, val) for (key, val) in proxies if val])
 
 
 def default_user_agent():
     """Return a string representing the default user agent."""
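Note the behavior change in the hunk above: the v1.2.3 helper builds its result purely from *_proxy environment variables, so OS-level proxy settings reachable through getproxies()/proxy_bypass() are no longer consulted. A small usage sketch, assuming the vendored module matches requests 1.2.3 from PyPI, where this helper lives at requests.utils.get_environ_proxies:

    import os
    from requests.utils import get_environ_proxies  # vendored equivalent assumed

    os.environ['http_proxy'] = 'http://127.0.0.1:8080'
    print(get_environ_proxies('http://example.com'))
    # -> {'http': 'http://127.0.0.1:8080'}, taken only from the environment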