author     Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2021-07-14 00:45:27 +0000
committer  Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2021-07-14 00:45:27 +0000
commit     c77fb8f7e5205af804fc9ce06bf8f83097921563 (patch)
tree       b977619bbbb98d1db7c5f422e9b679560e923023
parent     31f091c0ae1c78c65214d8c6c94938e00fe9f6e2 (diff)
parent     9dc6376378cd6146db0c17ccc362861b78bad581 (diff)
download   httplib2-c77fb8f7e5205af804fc9ce06bf8f83097921563.tar.gz
Change-Id: Ib087321a35dd41891c9f886ee125b8533d8d863e
-rw-r--r--   .travis.yml                        9
-rw-r--r--   Android.bp                        36
-rw-r--r--   CHANGELOG                         49
-rw-r--r--   MANIFEST.in                        7
-rw-r--r--   METADATA                          12
l---------   NOTICE                             1
-rw-r--r--   OWNERS                             1
-rw-r--r--   SECURITY.md                       19
-rw-r--r--   TEST_MAPPING                       8
-rw-r--r--   pyproject.toml                     9
-rw-r--r--   python2/httplib2/__init__.py      57
-rw-r--r--   python2/httplib2/socks.py         10
-rw-r--r--   python3/httplib2/__init__.py      62
-rw-r--r--   python3/httplib2/socks.py         10
-rw-r--r--   script/compile-py3-openssl11.sh    6
-rwxr-xr-x   script/release                    21
-rwxr-xr-x   setup.py                           2
-rw-r--r--   tests/__init__.py                  4
-rw-r--r--   tests/test_http.py                90
-rw-r--r--   tests/test_proxy.py               68
20 files changed, 386 insertions, 95 deletions
diff --git a/.travis.yml b/.travis.yml
index ca91b60..ed1aa72 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,14 +10,12 @@ cache:
env:
global:
- - pip_install_common='pip>=9.0 setuptools>=36.2 wheel>=0.30'
+ - pip_install_common='pip>=9.0 setuptools>=43.0 wheel>=0.30'
python:
- 2.7
- 3.5
- 3.6
- 3.7
-matrix:
- fast_finish: true
install: pip install $pip_install_common 'codecov>=2.0.15' -r requirements-test.txt
script: script/test -sv && codecov
@@ -25,6 +23,7 @@ stages:
- test
- release
jobs:
+ fast_finish: true
include:
- stage: test
python: pypy
@@ -69,7 +68,7 @@ jobs:
deploy:
provider: pypi
server: https://test.pypi.org/legacy/
- user: httplib2.release.test
+ username: httplib2.release.test
password:
secure: "XN3oxobC+26UPiS+F1MvL4c6XtytejZ13SkLXCHfgVDPSASzKqF81CnR4EhsnbfZLvSgGHgSlfY5Jve5HF2VR9GzpJMc6wzcfkkeBg6PeRHuMppIqmuoq7BTw81SZL9X62/mju/vxXs2cHpVkwNTSE7W1JH1bVXPj86oAR9xXo9waRuXcyPGNuSqmOd1NPOMbFmeuz+HeArk2Fz7wwo6H5BJuXjOrEOHWD1rzeRchH901PBUrftm54Id2TIVMARW8jm1saQY2FtPWaBv2v/DJC1fKWMJpcNQ3mmcvrrTFC1IJ00dk9XJfqx5hnsRaergc0UvzHoOGEQKSMdg0PUAkvNohAoCf+3GddPkvk8MaZ+aQlijoK6wp93A5dfTxBVZqdhmEdheolbYiJPunzS60bWvaEv6/D15/xyMiwGamUmF1Tx7UIvvm/zj6tAOBWbNEgLRyvQ0qx2RE95GLtp+RXK4pT+Kig1+cof5hrWODuEl1SSLMBySaNLWO73IN9esZu0X1JS7svnROLRJCAvRjppJYswwCPziP+B8XQDeMrhIDMHNzdbtxOPpBAXpYUE764FkzaUTMsK83Q+ugE3Dx8xtrAzT4M0fdiFv+3QEhSUtfvWsLH9zS9wXC5Px9kPKU3FO8mdUyf7A0bIasvJLNcApDJigKjBukToOIsZVFok="
distributions: "sdist bdist_wheel"
@@ -84,7 +83,7 @@ jobs:
script: script/release -auto
deploy:
provider: pypi
- user: httplib2.release
+ username: httplib2.release
password:
secure: "jZAyMFnmbhYChjsb3gRYfESWlio6pgmWEWBRxtBQXYZf+tzyKVISyHuyWkJvOVTP+lOpp2MTPZ2s1UgxGwfzZ/VE034Cz5iA/C6wafmgtSW+wK+KEJFPseHBBA7Gh4ReiAPi2a+i1UXdsJpFNhv36E9tbTq2sEinbisE2lSEQ0KHadjkc+6pvCjlyhmes7QyM5GviWYlWRNj2OIkT8SUuUcWQt7ZEl6kN82MoMHCaf1YxE/i4JUP3VLomWK3RLZJP356Y4IDkzlVhFU4MJ4ubNtoQ/ECM0uQ+nsHzO0k1uGWdF6mMTna7U5gLqUi9rfCK3bLMeVSo+TUCpjI7HkWDaBgVXGTe5dUMJCDfRgqeYa0GnriI74XYJu8NGjMLv30uO58t9E7VQGo2NrFRJDzKAIHANejWnpUPY3XgoN1rlrh52seMjaU2+jO40EC8HvIqeRRwPwhkqCSV2y+IZT2bOFp2nbMWhkUMsxIX7OXt+sy8GvK/ilMleEl7r0tnudsT7lGdnMwXlttI3CIAFGE7E+0zwnxNiMzQDzo+ILVR7ezrCK9M9xVYKGa3i8gkpSn0Fblnltgg7HaEI8YC3rMZe4iu1t0D6cZZUAAp2ZUo3NCJcZ35iUFBhlFvjVDbe2upJgU6GFgtDLjyzCJiKbz8qLRgMFYgT0CGr512e1jBo0="
distributions: "sdist bdist_wheel"
diff --git a/Android.bp b/Android.bp
index 05d69ac..c77abc6 100644
--- a/Android.bp
+++ b/Android.bp
@@ -12,6 +12,42 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+package {
+ default_applicable_licenses: ["external_python_httplib2_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+ name: "external_python_httplib2_license",
+ visibility: [":__subpackages__"],
+ license_kinds: [
+ "SPDX-license-identifier-Apache-2.0",
+ "SPDX-license-identifier-BSD",
+ "SPDX-license-identifier-GPL",
+ "SPDX-license-identifier-GPL-2.0",
+ "SPDX-license-identifier-LGPL-2.1",
+ "SPDX-license-identifier-MIT",
+ "SPDX-license-identifier-MPL",
+ "SPDX-license-identifier-MPL-1.1",
+ ],
+ license_text: [
+ "LICENSE",
+ ],
+}
+
filegroup {
// "cacerts.txt" are identical save for the fact that py3 cacerts.txt has
// a newline at the end while py2 cacerts.txt doesn't.
diff --git a/CHANGELOG b/CHANGELOG
index 07fb949..2db1cc0 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,3 +1,52 @@
+0.18.1
+
+ explicit build-backend workaround for pip build isolation bug
+ "AttributeError: 'module' object has no attribute '__legacy__'" on pip install
+ https://github.com/httplib2/httplib2/issues/169
+
+0.18.0
+
+ IMPORTANT security vulnerability CWE-93 CRLF injection
+ Force %xx quote of space, CR, LF characters in uri.
+ Special thanks to Recar https://github.com/Ciyfly for discrete notification.
+ https://cwe.mitre.org/data/definitions/93.html
+
+0.17.4
+
+ Ship test suite in source dist
+ https://github.com/httplib2/httplib2/pull/168
+
+0.17.3
+
+ IronPython2.7: relative import iri2uri fixes ImportError
+ https://github.com/httplib2/httplib2/pull/163
+
+0.17.2
+
+ python3 + debug + IPv6 disabled: https raised
+ "IndexError: Replacement index 1 out of range for positional args tuple"
+ https://github.com/httplib2/httplib2/issues/161
+
+0.17.1
+
+ python3: no_proxy was not checked with https
+ https://github.com/httplib2/httplib2/issues/160
+
+0.17.0
+
+ feature: Http().redirect_codes set, works after follow(_all)_redirects check
+ This allows one line workaround for old gcloud library that uses 308
+ response without redirect semantics.
+ https://github.com/httplib2/httplib2/issues/156
+
+0.16.0
+
+ IMPORTANT cache invalidation change, fix 307 keep method, add 308 Redirects
+ https://github.com/httplib2/httplib2/issues/151
+
+ proxy: username/password as str compatible with pysocks
+ https://github.com/httplib2/httplib2/issues/154
+
0.15.0
python2: regression in connect() error handling
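
The 0.17.0 entry above notes that Http().redirect_codes enables a one-line workaround for old gcloud clients that send 308 without redirect semantics. A minimal sketch of that workaround, assuming the default Http() configuration added in this diff; which codes to keep is up to the caller:

    import httplib2

    h = httplib2.Http()
    # redirect_codes is a frozenset; removing 308 stops httplib2 from following it,
    # so the raw 308 response is returned to the caller unchanged.
    h.redirect_codes = h.redirect_codes - {308}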
diff --git a/MANIFEST.in b/MANIFEST.in
index 12c4cc7..412def6 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,11 @@
recursive-include python2 *.py *.txt
recursive-include python3 *.py *.txt
+graft test
+graft tests
+include *.md
+include CHANGELOG
+include LICENSE
include python2/httplib2/test/*.txt
include requirements*.txt
+global-exclude __pycache__
+global-exclude *.py[cod]
diff --git a/METADATA b/METADATA
index 8b3c1c6..bcdbd44 100644
--- a/METADATA
+++ b/METADATA
@@ -9,10 +9,14 @@ third_party {
type: GIT
value: "https://github.com/httplib2/httplib2.git"
}
- version: "v0.15.0"
+ version: "v0.18.1"
+ # would be NOTICE save for:
+ # test/other_cacerts.txt
+ # doc/html/_static/jquery.js
+ license_type: RESTRICTED
last_upgrade_date {
- year: 2019
- month: 12
- day: 23
+ year: 2020
+ month: 5
+ day: 20
}
}
diff --git a/NOTICE b/NOTICE
deleted file mode 120000
index 7a694c9..0000000
--- a/NOTICE
+++ /dev/null
@@ -1 +0,0 @@
-LICENSE
\ No newline at end of file
diff --git a/OWNERS b/OWNERS
index e6fbf8d..110ac37 100644
--- a/OWNERS
+++ b/OWNERS
@@ -2,4 +2,3 @@
# or people with more than 10 commits last year.
# Please update this list if you find better owner candidates.
herbertxue@google.com
-yim@google.com
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..5eb3903
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,19 @@
+# Security Policy
+
+## Supported Versions
+
+master branch and latest release get priority support. You should expect all known problems fixed in master.
+
+All other released versions receive security updates per request.
+If you use some old version and can not upgrade for any or no reason, ask for security update release, most likely you will get it.
+
+## Reporting a Vulnerability
+
+Contact current maintainers. At 2020-05: temotor@gmail.com or https://t.me/temotor
+If that doesn't work, open Github issue just asking for private communication channel.
+
+This is volunteer maintained project, all issues are processed on best effort basis, no deadlines promised. Of course, security vulnerabilities get priority over regular issues.
+
+You can expect fame in history or maybe you prefer anonymity - say what you prefer.
+
+Thank you for responsible handling of security problems. Your attention and effort are appreciated.
diff --git a/TEST_MAPPING b/TEST_MAPPING
deleted file mode 100644
index 61a80b2..0000000
--- a/TEST_MAPPING
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "presubmit" : [
- {
- "name" : "acloud_test",
- "host" : true
- }
- ]
-}
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..5f7cbbd
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,9 @@
+[build-system]
+requires = ["setuptools >= 40.8.0", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.black]
+line-length = 121
+
+[tool.check-manifest]
+ignore = [".travis.yml", "script/*", "*.tex"]
diff --git a/python2/httplib2/__init__.py b/python2/httplib2/__init__.py
index c8302eb..f35ba48 100644
--- a/python2/httplib2/__init__.py
+++ b/python2/httplib2/__init__.py
@@ -19,7 +19,7 @@ __contributors__ = [
"Alex Yu",
]
__license__ = "MIT"
-__version__ = '0.15.0'
+__version__ = "0.18.1"
import base64
import calendar
@@ -129,7 +129,7 @@ if ssl is None:
_ssl_wrap_socket = _ssl_wrap_socket_unsupported
if sys.version_info >= (2, 3):
- from iri2uri import iri2uri
+ from .iri2uri import iri2uri
else:
def iri2uri(uri):
@@ -291,6 +291,12 @@ HOP_BY_HOP = [
"upgrade",
]
+# https://tools.ietf.org/html/rfc7231#section-8.1.3
+SAFE_METHODS = ("GET", "HEAD") # TODO add "OPTIONS", "TRACE"
+
+# To change, assign to `Http().redirect_codes`
+REDIRECT_CODES = frozenset((300, 301, 302, 303, 307, 308))
+
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
@@ -1175,9 +1181,9 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
host = self.host
port = self.port
-
+
socket_err = None
-
+
for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
try:
@@ -1353,9 +1359,9 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
host = self.host
port = self.port
-
+
socket_err = None
-
+
address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
for family, socktype, proto, canonname, sockaddr in address_info:
try:
@@ -1661,10 +1667,14 @@ class Http(object):
# If set to False then no redirects are followed, even safe ones.
self.follow_redirects = True
+ self.redirect_codes = REDIRECT_CODES
+
# Which HTTP methods do we apply optimistic concurrency to, i.e.
# which methods get an "if-match:" etag header added to them.
self.optimistic_concurrency_methods = ["PUT", "PATCH"]
+ self.safe_methods = list(SAFE_METHODS)
+
# If 'follow_redirects' is True, and this is set to True then
# all redirecs are followed, including unsafe ones.
self.follow_all_redirects = False
@@ -1858,10 +1868,10 @@ class Http(object):
if (
self.follow_all_redirects
- or (method in ["GET", "HEAD"])
- or response.status == 303
+ or method in self.safe_methods
+ or response.status in (303, 308)
):
- if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
+ if self.follow_redirects and response.status in self.redirect_codes:
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
if redirections:
@@ -1881,7 +1891,7 @@ class Http(object):
response["location"] = urlparse.urljoin(
absolute_uri, location
)
- if response.status == 301 and method in ["GET", "HEAD"]:
+ if response.status == 308 or (response.status == 301 and method in self.safe_methods):
response["-x-permanent-redirect-url"] = response["location"]
if "content-location" not in response:
response["content-location"] = absolute_uri
@@ -1918,7 +1928,7 @@ class Http(object):
response,
content,
)
- elif response.status in [200, 203] and method in ["GET", "HEAD"]:
+ elif response.status in [200, 203] and method in self.safe_methods:
# Don't cache 206's since we aren't going to handle byte range requests
if "content-location" not in response:
response["content-location"] = absolute_uri
@@ -1975,6 +1985,9 @@ class Http(object):
headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__
uri = iri2uri(uri)
+ # Prevent CWE-75 space injection to manipulate request via part of uri.
+ # Prevent CWE-93 CRLF injection to modify headers via part of uri.
+ uri = uri.replace(" ", "%20").replace("\r", "%0D").replace("\n", "%0A")
(scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
@@ -2018,6 +2031,7 @@ class Http(object):
headers["accept-encoding"] = "gzip, deflate"
info = email.Message.Message()
+ cachekey = None
cached_value = None
if self.cache:
cachekey = defrag_uri.encode("utf-8")
@@ -2038,8 +2052,6 @@ class Http(object):
self.cache.delete(cachekey)
cachekey = None
cached_value = None
- else:
- cachekey = None
if (
method in self.optimistic_concurrency_methods
@@ -2051,13 +2063,15 @@ class Http(object):
# http://www.w3.org/1999/04/Editing/
headers["if-match"] = info["etag"]
- if method not in ["GET", "HEAD"] and self.cache and cachekey:
- # RFC 2616 Section 13.10
+ # https://tools.ietf.org/html/rfc7234
+ # A cache MUST invalidate the effective Request URI as well as [...] Location and Content-Location
+ # when a non-error status code is received in response to an unsafe request method.
+ if self.cache and cachekey and method not in self.safe_methods:
self.cache.delete(cachekey)
# Check the vary header in the cache to see if this request
# matches what varies in the cache.
- if method in ["GET", "HEAD"] and "vary" in info:
+ if method in self.safe_methods and "vary" in info:
vary = info["vary"]
vary_headers = vary.lower().replace(" ", "").split(",")
for header in vary_headers:
@@ -2068,11 +2082,14 @@ class Http(object):
break
if (
- cached_value
- and method in ["GET", "HEAD"]
- and self.cache
+ self.cache
+ and cached_value
+ and (method in self.safe_methods or info["status"] == "308")
and "range" not in headers
):
+ redirect_method = method
+ if info["status"] not in ("307", "308"):
+ redirect_method = "GET"
if "-x-permanent-redirect-url" in info:
# Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections <= 0:
@@ -2083,7 +2100,7 @@ class Http(object):
)
(response, new_content) = self.request(
info["-x-permanent-redirect-url"],
- method="GET",
+ method=redirect_method,
headers=headers,
redirections=redirections - 1,
)
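
The cached permanent-redirect branch above now re-issues 307/308 redirects with the original method instead of always downgrading to GET. A standalone sketch of that decision, using a hypothetical helper name that is not part of httplib2:

    def cached_redirect_method(original_method, cached_status):
        # 307 and 308 preserve the request method; other cached redirect
        # entries fall back to GET, mirroring the redirect_method logic above.
        return original_method if cached_status in ("307", "308") else "GET"

    assert cached_redirect_method("POST", "308") == "POST"
    assert cached_redirect_method("HEAD", "301") == "GET"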
diff --git a/python2/httplib2/socks.py b/python2/httplib2/socks.py
index 5cef776..71eb4eb 100644
--- a/python2/httplib2/socks.py
+++ b/python2/httplib2/socks.py
@@ -238,7 +238,15 @@ class socksocket(socket.socket):
headers - Additional or modified headers for the proxy connect
request.
"""
- self.__proxy = (proxytype, addr, port, rdns, username, password, headers)
+ self.__proxy = (
+ proxytype,
+ addr,
+ port,
+ rdns,
+ username.encode() if username else None,
+ password.encode() if password else None,
+ headers,
+ )
def __negotiatesocks5(self, destaddr, destport):
"""__negotiatesocks5(self,destaddr,destport)
diff --git a/python3/httplib2/__init__.py b/python3/httplib2/__init__.py
index d8c3d34..cf2db60 100644
--- a/python3/httplib2/__init__.py
+++ b/python3/httplib2/__init__.py
@@ -15,7 +15,7 @@ __contributors__ = [
"Alex Yu",
]
__license__ = "MIT"
-__version__ = '0.15.0'
+__version__ = "0.18.1"
import base64
import calendar
@@ -161,6 +161,13 @@ HOP_BY_HOP = [
"upgrade",
]
+# https://tools.ietf.org/html/rfc7231#section-8.1.3
+SAFE_METHODS = ("GET", "HEAD", "OPTIONS", "TRACE")
+
+# To change, assign to `Http().redirect_codes`
+REDIRECT_CODES = frozenset((300, 301, 302, 303, 307, 308))
+
+
from httplib2 import certs
CA_CERTS = certs.where()
@@ -315,7 +322,7 @@ def _parse_cache_control(headers):
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
-# Set to true to turn on, usefull for testing servers.
+# Set to true to turn on, useful for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0
# In regex below:
@@ -1004,10 +1011,10 @@ class ProxyInfo(object):
proxy_headers: Additional or modified headers for the proxy connect
request.
"""
- if isinstance(proxy_user, str):
- proxy_user = proxy_user.encode()
- if isinstance(proxy_pass, str):
- proxy_pass = proxy_pass.encode()
+ if isinstance(proxy_user, bytes):
+ proxy_user = proxy_user.decode()
+ if isinstance(proxy_pass, bytes):
+ proxy_pass = proxy_pass.decode()
self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass, self.proxy_headers = (
proxy_type,
proxy_host,
@@ -1277,7 +1284,7 @@ class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
def connect(self):
"""Connect to a host on a given (SSL) port."""
- if self.proxy_info and self.proxy_info.isgood():
+ if self.proxy_info and self.proxy_info.isgood() and self.proxy_info.applies_to(self.host):
use_proxy = True
proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = (
self.proxy_info.astuple()
@@ -1359,7 +1366,7 @@ class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
except socket.error as e:
socket_err = e
if self.debuglevel > 0:
- print("connect fail: ({0}, {1})".format((self.host, self.port)))
+ print("connect fail: ({0}, {1})".format(self.host, self.port))
if use_proxy:
print(
"proxy: {0}".format(
@@ -1467,10 +1474,14 @@ class Http(object):
# If set to False then no redirects are followed, even safe ones.
self.follow_redirects = True
+ self.redirect_codes = REDIRECT_CODES
+
# Which HTTP methods do we apply optimistic concurrency to, i.e.
# which methods get an "if-match:" etag header added to them.
self.optimistic_concurrency_methods = ["PUT", "PATCH"]
+ self.safe_methods = list(SAFE_METHODS)
+
# If 'follow_redirects' is True, and this is set to True then
# all redirecs are followed, including unsafe ones.
self.follow_all_redirects = False
@@ -1663,10 +1674,10 @@ class Http(object):
if (
self.follow_all_redirects
- or (method in ["GET", "HEAD"])
- or response.status == 303
+ or method in self.safe_methods
+ or response.status in (303, 308)
):
- if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
+ if self.follow_redirects and response.status in self.redirect_codes:
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
if redirections:
@@ -1686,7 +1697,7 @@ class Http(object):
response["location"] = urllib.parse.urljoin(
absolute_uri, location
)
- if response.status == 301 and method in ["GET", "HEAD"]:
+ if response.status == 308 or (response.status == 301 and (method in self.safe_methods)):
response["-x-permanent-redirect-url"] = response["location"]
if "content-location" not in response:
response["content-location"] = absolute_uri
@@ -1723,7 +1734,7 @@ class Http(object):
response,
content,
)
- elif response.status in [200, 203] and method in ["GET", "HEAD"]:
+ elif response.status in [200, 203] and method in self.safe_methods:
# Don't cache 206's since we aren't going to handle byte range requests
if "content-location" not in response:
response["content-location"] = absolute_uri
@@ -1779,6 +1790,9 @@ a string that contains the response entity body.
headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__
uri = iri2uri(uri)
+ # Prevent CWE-75 space injection to manipulate request via part of uri.
+ # Prevent CWE-93 CRLF injection to modify headers via part of uri.
+ uri = uri.replace(" ", "%20").replace("\r", "%0D").replace("\n", "%0A")
(scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
@@ -1822,6 +1836,7 @@ a string that contains the response entity body.
headers["accept-encoding"] = "gzip, deflate"
info = email.message.Message()
+ cachekey = None
cached_value = None
if self.cache:
cachekey = defrag_uri
@@ -1839,8 +1854,6 @@ a string that contains the response entity body.
self.cache.delete(cachekey)
cachekey = None
cached_value = None
- else:
- cachekey = None
if (
method in self.optimistic_concurrency_methods
@@ -1852,13 +1865,15 @@ a string that contains the response entity body.
# http://www.w3.org/1999/04/Editing/
headers["if-match"] = info["etag"]
- if method not in ["GET", "HEAD"] and self.cache and cachekey:
- # RFC 2616 Section 13.10
+ # https://tools.ietf.org/html/rfc7234
+ # A cache MUST invalidate the effective Request URI as well as [...] Location and Content-Location
+ # when a non-error status code is received in response to an unsafe request method.
+ if self.cache and cachekey and method not in self.safe_methods:
self.cache.delete(cachekey)
# Check the vary header in the cache to see if this request
# matches what varies in the cache.
- if method in ["GET", "HEAD"] and "vary" in info:
+ if method in self.safe_methods and "vary" in info:
vary = info["vary"]
vary_headers = vary.lower().replace(" ", "").split(",")
for header in vary_headers:
@@ -1869,11 +1884,14 @@ a string that contains the response entity body.
break
if (
- cached_value
- and method in ["GET", "HEAD"]
- and self.cache
+ self.cache
+ and cached_value
+ and (method in self.safe_methods or info["status"] == "308")
and "range" not in headers
):
+ redirect_method = method
+ if info["status"] not in ("307", "308"):
+ redirect_method = "GET"
if "-x-permanent-redirect-url" in info:
# Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections <= 0:
@@ -1884,7 +1902,7 @@ a string that contains the response entity body.
)
(response, new_content) = self.request(
info["-x-permanent-redirect-url"],
- method="GET",
+ method=redirect_method,
headers=headers,
redirections=redirections - 1,
)
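
The python3 Http class above gains two plain instance attributes, redirect_codes and safe_methods, that callers may tune. A small sketch of adjusting them, assuming the defaults introduced in this diff:

    import httplib2

    h = httplib2.Http()
    # Follow only concrete redirects and ignore 300 Multiple Choices.
    h.redirect_codes = frozenset((301, 302, 303, 307, 308))
    # Narrow the RFC 7231 safe set used for caching and automatic redirects.
    h.safe_methods = ["GET", "HEAD"]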
diff --git a/python3/httplib2/socks.py b/python3/httplib2/socks.py
index 2926b4e..cc68e63 100644
--- a/python3/httplib2/socks.py
+++ b/python3/httplib2/socks.py
@@ -238,7 +238,15 @@ class socksocket(socket.socket):
headers - Additional or modified headers for the proxy connect
request.
"""
- self.__proxy = (proxytype, addr, port, rdns, username, password, headers)
+ self.__proxy = (
+ proxytype,
+ addr,
+ port,
+ rdns,
+ username.encode() if username else None,
+ password.encode() if password else None,
+ headers,
+ )
def __negotiatesocks5(self, destaddr, destport):
"""__negotiatesocks5(self,destaddr,destport)
diff --git a/script/compile-py3-openssl11.sh b/script/compile-py3-openssl11.sh
index 3486043..59f5022 100644
--- a/script/compile-py3-openssl11.sh
+++ b/script/compile-py3-openssl11.sh
@@ -3,7 +3,7 @@
cache_dir=$HOME/.cache
install_dir=$cache_dir/py3-openssl11
python_version="3.7.3"
-openssl_version="1.1.1c"
+openssl_version="1.1.1f"
cpucount=$(nproc --all)
export PYTHONDONTWRITEBYTECODE=1
@@ -14,7 +14,7 @@ if [[ $($install_dir/bin/python -V) != "Python $python_version" ]] ; then
mkdir -p /tmp/source
cd /tmp/source
# Compile OpenSSL
- wget --quiet https://www.openssl.org/source/openssl-$openssl_version.tar.gz
+ curl -fLOsS "https://www.openssl.org/source/openssl-$openssl_version.tar.gz"
echo "Extracting OpenSSL..."
tar xf openssl-$openssl_version.tar.gz
cd ./openssl-$openssl_version
@@ -29,7 +29,7 @@ if [[ $($install_dir/bin/python -V) != "Python $python_version" ]] ; then
cd /tmp/source
sudo apt install -qq --yes libffi-dev
# Compile latest Python
- wget --quiet https://www.python.org/ftp/python/$python_version/Python-$python_version.tar.xz
+ curl -fLOsS "https://www.python.org/ftp/python/$python_version/Python-$python_version.tar.xz"
echo "Extracting Python..."
tar xf Python-$python_version.tar.xz
cd ./Python-$python_version
diff --git a/script/release b/script/release
index 0f98e3e..a2ff80d 100755
--- a/script/release
+++ b/script/release
@@ -45,9 +45,9 @@ auto_prepare_release() {
last_tag=$(git tag --sort=-version:refname |head -n1)
last_tag=${last_tag##v}
version_replace="${last_tag}.post$(date -u +%y%m%d%H%M)"
- update_version "setup.py" "s/VERSION =.+/VERSION = '$version_replace'/"
- update_version "python2/httplib2/__init__.py" "s/__version__ =.+/__version__ = '$version_replace'/"
- update_version "python3/httplib2/__init__.py" "s/__version__ =.+/__version__ = '$version_replace'/"
+ update_version "setup.py" "s/VERSION =.+/VERSION = \"$version_replace\"/"
+ update_version "python2/httplib2/__init__.py" "s/__version__ =.+/__version__ = \"$version_replace\"/"
+ update_version "python3/httplib2/__init__.py" "s/__version__ =.+/__version__ = \"$version_replace\"/"
version_check "$version_replace"
fi
}
@@ -90,10 +90,11 @@ interactive() {
local venv=./venv-release
if [[ ! -d "$venv" ]] ; then
virtualenv $venv
- $venv/bin/pip install -U pip setuptools wheel twine
+ $venv/bin/pip install -U check-manifest pip 'setuptools>=43.0' wheel twine
fi
$venv/bin/python setup.py clean --all
$venv/bin/python setup.py sdist bdist_wheel
+ $venv/bin/check-manifest || echo "FIXME check-manifest" >&2
if confirm "Upload to PyPI? Use in special situation, normally CI (Travis) will upload to PyPI. [yN] " ; then
$venv/bin/twine upload dist/* || exit 1
@@ -132,9 +133,9 @@ bump_version() {
fi
echo "Next version: '$version_next'" >&2
- update_version "python3/httplib2/__init__.py" "s/__version__ =.+/__version__ = '$version_next'/"
- update_version "python2/httplib2/__init__.py" "s/__version__ =.+/__version__ = '$version_next'/"
- update_version "setup.py" "s/VERSION =.+/VERSION = '$version_next'/"
+ update_version "python3/httplib2/__init__.py" "s/__version__ =.+/__version__ = \"$version_next\"/"
+ update_version "python2/httplib2/__init__.py" "s/__version__ =.+/__version__ = \"$version_next\"/"
+ update_version "setup.py" "s/VERSION =.+/VERSION = \"$version_next\"/"
confirm "Confirm changes? [yN] " || exit 1
}
@@ -142,8 +143,8 @@ bump_version() {
update_version() {
local path="$1"
local sed_expr="$2"
- # sed -E --in-place='' -e "s/VERSION =.+/VERSION = '$version_replace'/" setup.py
- # sed -E --in-place='' -e "s/__version__ =.+/__version__ = '$version_replace'/" python2/httplib2/__init__.py python3/httplib2/__init__.py
+ # sed -E --in-place='' -e "s/VERSION =.+/VERSION = \"$version_replace\"/" setup.py
+ # sed -E --in-place='' -e "s/__version__ =.+/__version__ = \"$version_replace\"/" python2/httplib2/__init__.py python3/httplib2/__init__.py
echo "Updating file '$path'" >&2
if ! sed -E --in-place='' -e "$sed_expr" "$path" ; then
echo "sed error $?" >&2
@@ -209,7 +210,7 @@ assert_tree_clean() {
version_check() {
local need=$1
- local version_setup=$(fgrep 'VERSION =' setup.py |tr -d " '" |cut -d\= -f2)
+ local version_setup=$(fgrep 'VERSION =' setup.py |tr -d " '\"" |cut -d\= -f2)
local version_py2=$(cd python2 ; python2 -Es -c 'import httplib2;print(httplib2.__version__)')
local version_py3=$(cd python3 ; python3 -Es -c 'import httplib2;print(httplib2.__version__)')
if [[ "$version_setup" != "$need" ]] ; then
diff --git a/setup.py b/setup.py
index 33c8827..b66d24e 100755
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ import setuptools.command.test
import sys
pkgdir = {"": "python%s" % sys.version_info[0]}
-VERSION = '0.15.0'
+VERSION = "0.18.1"
# `python setup.py test` uses existing Python environment, no virtualenv, no pip.
diff --git a/tests/__init__.py b/tests/__init__.py
index 496652b..02a3ecf 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -75,7 +75,7 @@ class BufferedReader(object):
chunk = b""
else:
chunk = self._sock.recv(8 << 10)
- # print('!!! recv', chunk)
+ # print("!!! recv", chunk)
if not chunk:
self._end = True
if untilend:
@@ -406,10 +406,12 @@ def server_request(request_handler, **kwargs):
request = HttpRequest.from_buffered(buf)
if request is None:
break
+ # print("--- debug request\n" + request.raw.decode("ascii", "replace"))
i += 1
request.client_sock = sock
request.number = i
response = request_handler(request=request)
+ # print("--- debug response\n" + response.decode("ascii", "replace"))
sock.sendall(response)
request.client_sock = None
if not tick(request):
diff --git a/tests/test_http.py b/tests/test_http.py
index 97b52dc..f61992c 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -622,6 +622,57 @@ def test_get_307():
assert content == b"final content\n"
+def test_post_307():
+ # 307: follow with same method
+ http = httplib2.Http(cache=tests.get_cache_path(), timeout=1)
+ http.follow_all_redirects = True
+ r307 = tests.http_response_bytes(status=307, headers={"location": "/final"})
+ r200 = tests.http_response_bytes(status=200, body=b"final content\n")
+
+ with tests.server_list_http([r307, r200, r307, r200]) as uri:
+ response, content = http.request(uri, "POST")
+ assert response.previous.status == 307
+ assert not response.previous.fromcache
+ assert response.status == 200
+ assert not response.fromcache
+ assert content == b"final content\n"
+
+ response, content = http.request(uri, "POST")
+ assert response.previous.status == 307
+ assert not response.previous.fromcache
+ assert response.status == 200
+ assert not response.fromcache
+ assert content == b"final content\n"
+
+
+def test_change_308():
+ # 308: follow with same method, cache redirect
+ http = httplib2.Http(cache=tests.get_cache_path(), timeout=1)
+ routes = {
+ "/final": tests.make_http_reflect(),
+ "": tests.http_response_bytes(
+ status="308 Permanent Redirect",
+ add_date=True,
+ headers={"cache-control": "max-age=300", "location": "/final"},
+ ),
+ }
+
+ with tests.server_route(routes, request_count=3) as uri:
+ response, content = http.request(uri, "CHANGE", body=b"hello308")
+ assert response.previous.status == 308
+ assert not response.previous.fromcache
+ assert response.status == 200
+ assert not response.fromcache
+ assert content.startswith(b"CHANGE /final HTTP")
+
+ response, content = http.request(uri, "CHANGE")
+ assert response.previous.status == 308
+ assert response.previous.fromcache
+ assert response.status == 200
+ assert not response.fromcache
+ assert content.startswith(b"CHANGE /final HTTP")
+
+
def test_get_410():
# Test that we pass 410's through
http = httplib2.Http()
@@ -643,3 +694,42 @@ content"""
assert response.status == 200
assert content == b"content"
assert response["link"], "link1, link2"
+
+
+def test_custom_redirect_codes():
+ http = httplib2.Http()
+ http.redirect_codes = set([300])
+ with tests.server_const_http(status=301, request_count=1) as uri:
+ response, content = http.request(uri, "GET")
+ assert response.status == 301
+ assert response.previous is None
+
+
+def test_cwe93_inject_crlf():
+ # https://cwe.mitre.org/data/definitions/93.html
+ # GET /?q= HTTP/1.1 <- injected "HTTP/1.1" from attacker
+ # injected: attack
+ # ignore-http: HTTP/1.1 <- nominal "HTTP/1.1" from library
+ # Host: localhost:57285
+ http = httplib2.Http()
+ with tests.server_reflect() as uri:
+ danger_url = urllib.parse.urljoin(
+ uri, "?q= HTTP/1.1\r\ninjected: attack\r\nignore-http:"
+ )
+ response, content = http.request(danger_url, "GET")
+ assert response.status == 200
+ req = tests.HttpRequest.from_bytes(content)
+ assert req.headers.get("injected") is None
+
+
+def test_inject_space():
+ # Injecting space into request line is precursor to CWE-93 and possibly other injections
+ http = httplib2.Http()
+ with tests.server_reflect() as uri:
+ # "\r\nignore-http:" suffix is nuance for current server implementation
+ # please only pay attention to space after "?q="
+ danger_url = urllib.parse.urljoin(uri, "?q= HTTP/1.1\r\nignore-http:")
+ response, content = http.request(danger_url, "GET")
+ assert response.status == 200
+ req = tests.HttpRequest.from_bytes(content)
+ assert req.uri == "/?q=%20HTTP/1.1%0D%0Aignore-http:"
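
test_inject_space above pins the exact request-target produced by the new quoting. The same substitution can be reproduced standalone (the URI below is hypothetical and mirrors the replace() chain added to __init__.py):

    uri = "http://localhost/?q= HTTP/1.1\r\ninjected: attack"
    quoted = uri.replace(" ", "%20").replace("\r", "%0D").replace("\n", "%0A")
    assert quoted == "http://localhost/?q=%20HTTP/1.1%0D%0Ainjected:%20attack"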
diff --git a/tests/test_proxy.py b/tests/test_proxy.py
index 375367f..edafe01 100644
--- a/tests/test_proxy.py
+++ b/tests/test_proxy.py
@@ -32,20 +32,22 @@ def test_from_url_ident():
pi = httplib2.proxy_info_from_url("http://zoidberg:fish@someproxy:99")
assert pi.proxy_host == "someproxy"
assert pi.proxy_port == 99
- assert pi.proxy_user == b"zoidberg"
- assert pi.proxy_pass == b"fish"
+ assert pi.proxy_user == "zoidberg"
+ assert pi.proxy_pass == "fish"
-def test_from_env():
- os.environ["http_proxy"] = "http://myproxy.example.com:8080"
+def test_from_env(monkeypatch):
+ assert os.environ.get("http_proxy") is None
+ monkeypatch.setenv("http_proxy", "http://myproxy.example.com:8080")
pi = httplib2.proxy_info_from_environment()
assert pi.proxy_host == "myproxy.example.com"
assert pi.proxy_port == 8080
-def test_from_env_https():
- os.environ["http_proxy"] = "http://myproxy.example.com:80"
- os.environ["https_proxy"] = "http://myproxy.example.com:81"
+def test_from_env_https(monkeypatch):
+ assert os.environ.get("http_proxy") is None
+ monkeypatch.setenv("http_proxy", "http://myproxy.example.com:80")
+ monkeypatch.setenv("https_proxy", "http://myproxy.example.com:81")
pi = httplib2.proxy_info_from_environment("https")
assert pi.proxy_host == "myproxy.example.com"
assert pi.proxy_port == 81
@@ -57,10 +59,10 @@ def test_from_env_none():
assert pi is None
-def test_applies_to():
- os.environ["http_proxy"] = "http://myproxy.example.com:80"
- os.environ["https_proxy"] = "http://myproxy.example.com:81"
- os.environ["no_proxy"] = "localhost,example.com,.wildcard"
+def test_applies_to(monkeypatch):
+ monkeypatch.setenv("http_proxy", "http://myproxy.example.com:80")
+ monkeypatch.setenv("https_proxy", "http://myproxy.example.com:81")
+ monkeypatch.setenv("no_proxy", "localhost,example.com,.wildcard")
pi = httplib2.proxy_info_from_environment()
assert not pi.applies_to("localhost")
assert pi.applies_to("www.google.com")
@@ -71,18 +73,18 @@ def test_applies_to():
assert not pi.applies_to("pub.sub.wildcard")
-def test_noproxy_trailing_comma():
- os.environ["http_proxy"] = "http://myproxy.example.com:80"
- os.environ["no_proxy"] = "localhost,other.host,"
+def test_noproxy_trailing_comma(monkeypatch):
+ monkeypatch.setenv("http_proxy", "http://myproxy.example.com:80")
+ monkeypatch.setenv("no_proxy", "localhost,other.host,")
pi = httplib2.proxy_info_from_environment()
assert not pi.applies_to("localhost")
assert not pi.applies_to("other.host")
assert pi.applies_to("example.domain")
-def test_noproxy_star():
- os.environ["http_proxy"] = "http://myproxy.example.com:80"
- os.environ["NO_PROXY"] = "*"
+def test_noproxy_star(monkeypatch):
+ monkeypatch.setenv("http_proxy", "http://myproxy.example.com:80")
+ monkeypatch.setenv("NO_PROXY", "*")
pi = httplib2.proxy_info_from_environment()
for host in ("localhost", "169.254.38.192", "www.google.com"):
assert not pi.applies_to(host)
@@ -171,3 +173,35 @@ def test_socks5_auth():
http = httplib2.Http(proxy_info=proxy_info)
with tests.assert_raises(httplib2.socks.Socks5AuthError):
http.request(uri, "GET")
+
+
+def test_functional_noproxy_star_http(monkeypatch):
+ def handler(request):
+ if request.method == "CONNECT":
+ return tests.http_response_bytes(
+ status="400 Expected direct", headers={"connection": "close"},
+ )
+ return tests.http_response_bytes()
+
+ with tests.server_request(handler) as uri:
+ monkeypatch.setenv("http_proxy", uri)
+ monkeypatch.setenv("no_proxy", "*")
+ http = httplib2.Http()
+ response, _ = http.request(uri, "GET")
+ assert response.status == 200
+
+
+def test_functional_noproxy_star_https(monkeypatch):
+ def handler(request):
+ if request.method == "CONNECT":
+ return tests.http_response_bytes(
+ status="400 Expected direct", headers={"connection": "close"},
+ )
+ return tests.http_response_bytes()
+
+ with tests.server_request(handler, tls=True) as uri:
+ monkeypatch.setenv("https_proxy", uri)
+ monkeypatch.setenv("no_proxy", "*")
+ http = httplib2.Http(ca_certs=tests.CA_CERTS)
+ response, _ = http.request(uri, "GET")
+ assert response.status == 200
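
The functional no_proxy tests above depend on the wildcard rule also covered by test_noproxy_star. A minimal sketch of the same behaviour outside pytest (the proxy host is an example value):

    import os

    import httplib2

    os.environ["http_proxy"] = "http://myproxy.example.com:8080"
    os.environ["no_proxy"] = "*"
    pi = httplib2.proxy_info_from_environment()
    # With no_proxy="*" every host bypasses the proxy.
    assert not pi.applies_to("www.google.com")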