From 352dbc3856d47fe2f4f64a07d44998e23e285e89 Mon Sep 17 00:00:00 2001
From: James Pickering
Date: Tue, 27 Jun 2017 16:15:05 +0100
Subject: [PATCH 1/4] Fix #3844

---
 requests/adapters.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/requests/adapters.py b/requests/adapters.py
index d3b2d5bb1e..313c94e312 100644
--- a/requests/adapters.py
+++ b/requests/adapters.py
@@ -506,6 +506,10 @@ def send(
 
                 low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
 
+                if hasattr(conn, 'proxy'):
+                    if conn.proxy is not None and not getattr(low_conn, 'sock', None):
+                        conn._prepare_proxy(low_conn)
+
                 try:
                     skip_host = "Host" in request.headers
                     low_conn.putrequest(

From e7773451a23a1e020d14aa90cc58612cc9b06ce6 Mon Sep 17 00:00:00 2001
From: Leon Verrall
Date: Tue, 2 Jul 2019 10:17:33 +0100
Subject: [PATCH 2/4] Use urllib for chunked requests AGAIN

---
 requests/adapters.py | 74 ++++++++------------------------------------
 1 file changed, 13 insertions(+), 61 deletions(-)

diff --git a/requests/adapters.py b/requests/adapters.py
index 313c94e312..c635fea98c 100644
--- a/requests/adapters.py
+++ b/requests/adapters.py
@@ -485,67 +485,19 @@ def send(
             timeout = TimeoutSauce(connect=timeout, read=timeout)
 
         try:
-            if not chunked:
-                resp = conn.urlopen(
-                    method=request.method,
-                    url=url,
-                    body=request.body,
-                    headers=request.headers,
-                    redirect=False,
-                    assert_same_host=False,
-                    preload_content=False,
-                    decode_content=False,
-                    retries=self.max_retries,
-                    timeout=timeout,
-                )
-
-            # Send the request.
-            else:
-                if hasattr(conn, "proxy_pool"):
-                    conn = conn.proxy_pool
-
-                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
-
-                if hasattr(conn, 'proxy'):
-                    if conn.proxy is not None and not getattr(low_conn, 'sock', None):
-                        conn._prepare_proxy(low_conn)
-
-                try:
-                    skip_host = "Host" in request.headers
-                    low_conn.putrequest(
-                        request.method,
-                        url,
-                        skip_accept_encoding=True,
-                        skip_host=skip_host,
-                    )
-
-                    for header, value in request.headers.items():
-                        low_conn.putheader(header, value)
-
-                    low_conn.endheaders()
-
-                    for i in request.body:
-                        low_conn.send(hex(len(i))[2:].encode("utf-8"))
-                        low_conn.send(b"\r\n")
-                        low_conn.send(i)
-                        low_conn.send(b"\r\n")
-                    low_conn.send(b"0\r\n\r\n")
-
-                    # Receive the response from the server
-                    r = low_conn.getresponse()
-
-                    resp = HTTPResponse.from_httplib(
-                        r,
-                        pool=conn,
-                        connection=low_conn,
-                        preload_content=False,
-                        decode_content=False,
-                    )
-                except Exception:
-                    # If we hit any problems here, clean up the connection.
-                    # Then, raise so that we can handle the actual exception.
-                    low_conn.close()
-                    raise
+            resp = conn.urlopen(
+                method=request.method,
+                url=url,
+                body=request.body,
+                headers=request.headers,
+                redirect=False,
+                assert_same_host=False,
+                preload_content=False,
+                decode_content=False,
+                retries=self.max_retries,
+                timeout=timeout,
+                chunked=chunked
+            )
 
         except (ProtocolError, OSError) as err:
             raise ConnectionError(err, request=request)

From 0760b514180594933b60d76c6da0f805065ed6c7 Mon Sep 17 00:00:00 2001
From: Levi Blaney
Date: Wed, 31 Aug 2022 18:07:56 -0700
Subject: [PATCH 3/4] add missing comma for linting

---
 requests/adapters.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requests/adapters.py b/requests/adapters.py
index c635fea98c..376c02f072 100644
--- a/requests/adapters.py
+++ b/requests/adapters.py
@@ -496,7 +496,7 @@ def send(
                 decode_content=False,
                 retries=self.max_retries,
                 timeout=timeout,
-                chunked=chunked
+                chunked=chunked,
             )
 
         except (ProtocolError, OSError) as err:

From 8339c3941f2fcb6e302e3b1720cb7597f4004e3c Mon Sep 17 00:00:00 2001
From: Levi Blaney
Date: Thu, 1 Sep 2022 02:46:09 +0000
Subject: [PATCH 4/4] remove unused import

---
 requests/adapters.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/requests/adapters.py b/requests/adapters.py
index 376c02f072..f13ae4e5e2 100644
--- a/requests/adapters.py
+++ b/requests/adapters.py
@@ -22,7 +22,6 @@
 from urllib3.exceptions import ReadTimeoutError, ResponseError
 from urllib3.exceptions import SSLError as _SSLError
 from urllib3.poolmanager import PoolManager, proxy_from_url
-from urllib3.response import HTTPResponse
 from urllib3.util import Timeout as TimeoutSauce
 from urllib3.util import parse_url
 from urllib3.util.retry import Retry
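
Note (not part of the patches): after this series, requests hands chunked transfer encoding to urllib3 via urlopen(chunked=...) instead of writing hex length prefixes and the trailing 0\r\n\r\n terminator over a raw low-level connection itself. A minimal sketch of how this code path gets exercised, assuming a generator body (which carries no Content-Length, so HTTPAdapter.send() resolves chunked=True); the httpbin.org URL is only an example endpoint:

    import requests

    def body_chunks():
        # A generator body has no known length, so requests sends it with
        # Transfer-Encoding: chunked, and HTTPAdapter.send() passes
        # chunked=True down to urllib3's urlopen().
        yield b"first chunk\r\n"
        yield b"second chunk\r\n"

    # Example endpoint only; any server that accepts chunked uploads will do.
    resp = requests.post("https://httpbin.org/post", data=body_chunks())
    print(resp.status_code)

Routing chunked bodies through the same urlopen() call as everything else also means retries, proxy setup, and response handling follow one path, which is what made the separate low_conn branch (and the HTTPResponse import removed in patch 4) unnecessary.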