Normally, requests unwraps the MaxRetryError exception for you, but not for this specific exception. If you can't upgrade to 2.9.2 or newer, you can catch it specifically (unwrapping two layers now):
from requests.exceptions import ConnectionError
from requests.packages.urllib3.exceptions import MaxRetryError
from requests.packages.urllib3.exceptions import ProxyError as urllib3_ProxyError

try:
    ...
except ConnectionError as ce:
    if (isinstance(ce.args[0], MaxRetryError)
            and isinstance(ce.args[0].reason, urllib3_ProxyError)):
        # oops, requests should have handled this, but didn't.
        # see https://github.com/kennethreitz/requests/issues/3050
        pass
def test_neutron_exception_is_raised_on_any_request_error(self):
    # timeout exception raises InfobloxTimeoutError
    f = mock.Mock()
    f.__name__ = 'mock'
    f.side_effect = req_exc.Timeout
    self.assertRaises(exceptions.InfobloxTimeoutError,
                      connector.reraise_neutron_exception(f))

    # all other request exceptions raise InfobloxConnectionError
    supported_exceptions = [req_exc.HTTPError,
                            req_exc.ConnectionError,
                            req_exc.ProxyError,
                            req_exc.SSLError,
                            req_exc.TooManyRedirects,
                            req_exc.InvalidURL]
    for ex in supported_exceptions:
        f.side_effect = ex
        self.assertRaises(exceptions.InfobloxConnectionError,
                          connector.reraise_neutron_exception(f))
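For context, a decorator with roughly this shape would satisfy the test above. This is a minimal sketch, not the actual connector.reraise_neutron_exception from the project, and the two Infoblox exception classes stand in for the project's own exceptions module.

# Sketch of a re-raise decorator like the one the test exercises; the exception
# classes here are placeholders for the project's real exceptions module.
import functools

import requests.exceptions as req_exc


class InfobloxTimeoutError(Exception):
    pass


class InfobloxConnectionError(Exception):
    pass


def reraise_neutron_exception(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except req_exc.Timeout:
            raise InfobloxTimeoutError('request timed out')
        except req_exc.RequestException as exc:
            # HTTPError, ConnectionError, ProxyError, SSLError, TooManyRedirects
            # and InvalidURL all subclass RequestException, so they land here.
            raise InfobloxConnectionError('connection error: %s' % exc)
    return wrapper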
# Assumed imports: the original module defines LOG and these exception aliases
# elsewhere; they are reproduced here so the snippet stands alone.
import logging

from requests.exceptions import (ConnectionError as ConnectionException,
                                 ProxyError, RequestException, Timeout)

LOG = logging.getLogger(__name__)


def exception_handle(method):
    """Handle exception raised by requests library."""
    def wrapper(*args, **kwargs):
        try:
            result = method(*args, **kwargs)
            return result
        except ProxyError:
            LOG.exception('ProxyError when try to get %s.', args)
            raise ProxyError('A proxy error occurred.')
        except ConnectionException:
            LOG.exception('ConnectionError when try to get %s.', args)
            raise ConnectionException('DNS failure, refused connection, etc.')
        except Timeout:
            LOG.exception('Timeout when try to get %s', args)
            raise Timeout('The request timed out.')
        except RequestException:
            LOG.exception('RequestException when try to get %s.', args)
            raise RequestException('Please check out your network.')
    return wrapper
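A hypothetical usage of the decorator above; fetch() and the URL are placeholders added for illustration, not part of the original code.

# Hypothetical usage of exception_handle; fetch() and the URL are invented.
import requests


@exception_handle
def fetch(url):
    return requests.get(url, timeout=10)


resp = fetch('https://example.com/api/status')
print(resp.status_code)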
def get_url(url):
    headers['Referer'] = url
    count = 0
    while True:
        count += 1
        if count < settings['maxtries']:
            proxy = get_proxy()
        else:
            proxy = None
        try:
            resp = request('get', url, headers=headers, proxies={'http': proxy})
            return resp
        except ProxyError:
            if count > settings['maxtries'] + 2:
                print('Exit: Could not get url.<@get_url>')
                exit(1)
            continue
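The get_url snippet above relies on several names it never defines (headers, settings, get_proxy, plus request and ProxyError from requests). A purely illustrative set of stand-ins that would let it run, with made-up values:

# Placeholder definitions for the names the snippet above assumes; the proxy
# addresses and settings values are invented for illustration only.
import random

from requests import request
from requests.exceptions import ProxyError

headers = {'User-Agent': 'Mozilla/5.0'}
settings = {'maxtries': 5}

PROXY_POOL = ['http://10.0.0.1:3128', 'http://10.0.0.2:3128']  # made-up proxies


def get_proxy():
    # Pick a proxy at random from the pool; the real get_proxy is not shown.
    return random.choice(PROXY_POOL)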
def test_no_proxy_domain_fail(self, socks5_proxy):
    instance = {
        'proxy': {
            'http': 'http://1.2.3.4:567',
            'no_proxy': '.google.com,example.com,example,9'
        }
    }
    init_config = {}
    http = RequestsWrapper(instance, init_config)

    # no_proxy not match: .google.com
    # ".y.com" matches "x.y.com" but not "y.com"
    with pytest.raises((ConnectTimeout, ProxyError)):
        http.get('http://google.com', timeout=1)

    # no_proxy not match: example or example.com
    with pytest.raises((ConnectTimeout, ProxyError)):
        http.get('http://notexample.com', timeout=1)

    with pytest.raises((ConnectTimeout, ProxyError)):
        http.get('http://example.org', timeout=1)

    # no_proxy not match: 9
    with pytest.raises((ConnectTimeout, ProxyError)):
        http.get('http://127.0.0.99', timeout=1)
def delete(self):
    """Delete the Case Management Object.

    If no id is present in the obj then returns immediately.
    """
    if not self.id:  # pragma: no cover
        self.tcex.log.warning('A case without an ID cannot be deleted.')
        return

    url = f'{self.api_endpoint}/{self.id}'
    r = None
    try:
        r = self.tcex.session.delete(url)
        self.tcex.log.debug(
            f'Method: ({r.request.method.upper()}), '
            f'Status Code: {r.status_code}, '
            f'URL: ({r.url})'
        )
    except (ConnectionError, ProxyError):  # pragma: no cover
        self.tcex.handle_error(
            951, ['OPTIONS', 407, '{\"message\": \"Connection Error\"}', self.api_endpoint]
        )

    if len(r.content) < 5000:
        self.tcex.log.debug(u'response text: {}'.format(r.text))
    else:  # pragma: no cover
        self.tcex.log.debug(u'response text: (text too large to log)')

    if not self.success(r):
        err = r.text or r.reason
        if r.status_code == 404:
            self.tcex.handle_error(952, [r.request.method.upper(), r.status_code, err, r.url])
        self.tcex.handle_error(950, [r.status_code, err, r.url])
    return
I tried running some requests in Python to see if they would appear in Fiddler. What is causing Python to stop working (only while Fiddler is running)? My current Fiddler version is v5.0.20204.45441, which I know is an old version. I trusted the root certificate, even installed it manually, and the HTTPS traffic from my browser is displayed normally, as it should. However, trying to send a request over HTTPS blocks for a long time and eventually fails. Nothing is logged to Fiddler, because no actual request was made. After 20-30 seconds, a long error is raised in Python:

urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='www.google.com', port=443): Max retries exceeded with url: /?id=1234567890 (Caused by ProxyError('Your proxy appears to only use HTTP and not HTTPS, try changing your proxy URL to be HTTP. See: https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html#https-proxy-error-http-proxy', SSLError(SSLError(1, '[SSL: WRONG_VERSION_NUMBER] wrong version number (_ssl.c:1129)'))))

Alternatively, you can try setting your proxy manually with code before the test's request:
import requests
...
proxies = {
    'http': 'http://127.0.0.1:8888',
    'https': 'http://127.0.0.1:8888'
}
response = requests.post(url, proxies=proxies)
What Lini said sounds true, and I found a workaround: install an older version of urllib3.
pip install --upgrade urllib3==1.25.11
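If downgrading urllib3 is not an option, you can at least catch the failure explicitly around the proxied call. A minimal sketch, reusing the Fiddler proxy address and the target host from the traceback above:

# Rough sketch: surface the proxy/TLS failure explicitly instead of letting the
# long MaxRetryError traceback bubble up.
import requests
from requests.exceptions import ProxyError, SSLError

proxies = {
    'http': 'http://127.0.0.1:8888',
    'https': 'http://127.0.0.1:8888',
}

try:
    response = requests.get('https://www.google.com/', proxies=proxies, timeout=30)
    print(response.status_code)
except (ProxyError, SSLError) as exc:
    # This is where the ProxyError shown in the traceback above would surface.
    print(f'Proxy/TLS failure: {exc}')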
The most important exception classes in HTTPX are RequestError and HTTPStatusError. The RequestError class is a superclass that encompasses any exception that occurs while issuing an HTTP request; these exceptions include a .request attribute. The HTTPStatusError class is raised by response.raise_for_status() on responses which are not a 2xx success code; these exceptions include both a .request and a .response attribute. There is also a base class HTTPError that includes both of these categories and can be used to catch either failed requests or 4xx and 5xx responses.
try:
    response = httpx.get("https://www.example.com/")
except httpx.RequestError as exc:
    print(f"An error occurred while requesting {exc.request.url!r}.")

response = httpx.get("https://www.example.com/")
try:
    response.raise_for_status()
except httpx.HTTPStatusError as exc:
    print(f"Error response {exc.response.status_code} while requesting {exc.request.url!r}.")

try:
    response = httpx.get("https://www.example.com/")
    response.raise_for_status()
except httpx.HTTPError as exc:
    print(f"Error while requesting {exc.request.url!r}.")

try:
    response = httpx.get("https://www.example.com/")
    response.raise_for_status()
except httpx.RequestError as exc:
    print(f"An error occurred while requesting {exc.request.url!r}.")
except httpx.HTTPStatusError as exc:
    print(f"Error response {exc.response.status_code} while requesting {exc.request.url!r}.")

try:
    response = httpx.get("https://www.example.com")
    response.raise_for_status()
except httpx.HTTPError as exc:
    print(f"HTTP Exception for {exc.request.url} - {exc}")
This problem has nothing to do with TLS decryption of requests sent by the requests library; it is about using the requests library to send requests inside a mitmproxy addon. Once the Windows "Use a proxy server" option is enabled, the requests library in my addon will not work. However, when I disable the "Use a proxy server" option (which means traffic will not pass through mitmproxy), the request succeeds. Example 1:
import requests

class example:
    def __init__(self):
        print('example loaded!')
        r = requests.get('https://mitmproxy.org/', timeout=5)
        print(r)
        print('Done!')

addons = [
    example()
]
example loaded!
Loading script .\solve.py
in script .\solve.py: HTTPSConnectionPool(host='mitmproxy.org', port=443): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError('<urllib3.connection.HTTPSConnection object at 0x000002ACDF46B040>: Failed to establish a new connection: [WinError 10061] No connection could be made because the target machine actively refused it')))
Proxy server listening at http://*:8080
example loaded!
<Response [200]>
Done!
Example 2: if I want to send a request every time mitmproxy catches a request to 'github.com' ("Use a proxy server" is enabled):

import requests

class example:
    def __init__(self):
        print('example loaded!')

    def request(self, flow):
        if flow.request.host == 'github.com':
            r = requests.get('https://mitmproxy.org/', timeout=5)
            print('github!')

addons = [
    example()
]
import requests

class example:
    def __init__(self):
        print('example loaded!')

    def response(self, flow):
        print('response!')
        if flow.request.host == 'github.com':
            r = requests.get('https://mitmproxy.org/')
            print(r)
            print('github!')

addons = [
    example()
]
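One possible workaround, offered purely as an assumption and not something confirmed in this thread: make the addon's own requests call bypass the system proxy settings, for example with a Session whose trust_env is disabled, so the call is not routed back through mitmproxy.

# Hedged sketch of a possible workaround (not confirmed in the thread): ignore
# the system/environment proxy settings for the addon's own outgoing request.
import requests


class example:
    def __init__(self):
        print('example loaded!')

    def request(self, flow):
        if flow.request.host == 'github.com':
            with requests.Session() as s:
                s.trust_env = False  # do not pick up system/env proxy settings
                r = s.get('https://mitmproxy.org/', timeout=5)
                print(r.status_code)


addons = [
    example()
]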
Fixed a bug where Requests would not correctly correlate cookies set when using custom Host headers if those Host headers did not use the native string type for the platform.
Fixed issue where responses whose body had not been fully consumed would have the underlying connection closed but not returned to the connection pool, which could cause Requests to hang in situations where the HTTPAdapter had been configured to use a blocking connection pool.
Fixed a bug when sending JSON data that could cause us to encounter obscure OpenSSL errors in certain network conditions (yes, really).
Fixed an issue encountered by some Requests plugins where constructing a Response object would cause Response.content to raise an AttributeError.
To install Requests, simply:
$ pip install requests ✨🍰✨