linkcheck tests: test webservers: enable HTTP/1.1 protocol #11392

Merged
58 changes: 50 additions & 8 deletions tests/test_build_linkcheck.py
@@ -27,24 +27,39 @@


 class DefaultsHandler(http.server.BaseHTTPRequestHandler):
+    protocol_version = "HTTP/1.1"
+
     def do_HEAD(self):
         if self.path[1:].rstrip() == "":
             self.send_response(200, "OK")
+            self.send_header("Content-Length", "0")
             self.end_headers()
         elif self.path[1:].rstrip() == "anchor.html":
             self.send_response(200, "OK")
             self.end_headers()
         else:
             self.send_response(404, "Not Found")
+            self.send_header("Content-Length", "0")
             self.end_headers()

     def do_GET(self):
-        self.do_HEAD()
         if self.path[1:].rstrip() == "":
-            self.wfile.write(b"ok\n\n")
+            content = b"ok\n\n"
         elif self.path[1:].rstrip() == "anchor.html":
             doc = '<!DOCTYPE html><html><body><a id="found"></a></body></html>'
-            self.wfile.write(doc.encode('utf-8'))
+            content = doc.encode("utf-8")
+        else:
+            content = b""
+
+        if content:
+            self.send_response(200, "OK")
+            self.send_header("Content-Length", str(len(content)))
+            self.end_headers()
+            self.wfile.write(content)
+        else:
+            self.send_response(404, "Not Found")
+            self.send_header("Content-Length", "0")
+            self.end_headers()


 @pytest.mark.sphinx('linkcheck', testroot='linkcheck', freshenv=True)
@@ -181,6 +196,8 @@ def test_anchors_ignored(app):
 @pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-anchor', freshenv=True)
 def test_raises_for_invalid_status(app):
     class InternalServerErrorHandler(http.server.BaseHTTPRequestHandler):
+        protocol_version = "HTTP/1.1"
+
         def do_GET(self):
             self.send_error(500, "Internal Server Error")

@@ -196,13 +213,16 @@ def do_GET(self):

 def capture_headers_handler(records):
     class HeadersDumperHandler(http.server.BaseHTTPRequestHandler):
+        protocol_version = "HTTP/1.1"
+
         def do_HEAD(self):
             self.do_GET()

         def do_GET(self):
+            records.append(self.headers.as_string())
             self.send_response(200, "OK")
+            self.send_header("Content-Length", "0")
             self.end_headers()
-            records.append(self.headers.as_string())
     return HeadersDumperHandler


@@ -291,11 +311,14 @@ def test_linkcheck_request_headers_default(app):

 def make_redirect_handler(*, support_head):
     class RedirectOnceHandler(http.server.BaseHTTPRequestHandler):
+        protocol_version = "HTTP/1.1"
+
         def do_HEAD(self):
             if support_head:
                 self.do_GET()
             else:
                 self.send_response(405, "Method Not Allowed")
+                self.send_header("Content-Length", "0")
                 self.end_headers()

         def do_GET(self):
@@ -304,6 +327,7 @@ def do_GET(self):
             else:
                 self.send_response(302, "Found")
                 self.send_header("Location", "http://localhost:7777/?redirected=1")
+                self.send_header("Content-Length", "0")
                 self.end_headers()

         def log_date_time_string(self):
@@ -381,13 +405,19 @@ def test_linkcheck_allowed_redirects(app, warning):


 class OKHandler(http.server.BaseHTTPRequestHandler):
+    protocol_version = "HTTP/1.1"
+
     def do_HEAD(self):
         self.send_response(200, "OK")
+        self.send_header("Content-Length", "0")
         self.end_headers()

     def do_GET(self):
-        self.do_HEAD()
-        self.wfile.write(b"ok\n")
+        content = b"ok\n"
+        self.send_response(200, "OK")
+        self.send_header("Content-Length", str(len(content)))
+        self.end_headers()
+        self.wfile.write(content)


 @pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
@@ -492,15 +522,21 @@ def test_connect_to_selfsigned_nonexistent_cert_file(app):


 class InfiniteRedirectOnHeadHandler(http.server.BaseHTTPRequestHandler):
+    protocol_version = "HTTP/1.1"
+
     def do_HEAD(self):
         self.send_response(302, "Found")
         self.send_header("Location", "http://localhost:7777/")
+        self.send_header("Content-Length", "0")
         self.end_headers()

     def do_GET(self):
+        content = b"ok\n"
         self.send_response(200, "OK")
+        self.send_header("Content-Length", str(len(content)))
         self.end_headers()
-        self.wfile.write(b"ok\n")
+        self.wfile.write(content)
+        self.close_connection = True  # we don't expect the client to read this response body


 @pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
@@ -526,11 +562,14 @@ def test_TooManyRedirects_on_HEAD(app, monkeypatch):

 def make_retry_after_handler(responses):
     class RetryAfterHandler(http.server.BaseHTTPRequestHandler):
+        protocol_version = "HTTP/1.1"
+
         def do_HEAD(self):
             status, retry_after = responses.pop(0)
             self.send_response(status)
             if retry_after:
                 self.send_header('Retry-After', retry_after)
+            self.send_header("Content-Length", "0")
             self.end_headers()

         def log_date_time_string(self):
@@ -677,11 +716,14 @@ def test_limit_rate_bails_out_after_waiting_max_time(app):


 class ConnectionResetHandler(http.server.BaseHTTPRequestHandler):
+    protocol_version = "HTTP/1.1"
+
     def do_HEAD(self):
-        self.connection.close()
+        self.close_connection = True

     def do_GET(self):
         self.send_response(200, "OK")
+        self.send_header("Content-Length", "0")
         self.end_headers()

2 changes: 1 addition & 1 deletion tests/utils.py
@@ -19,7 +19,7 @@
 class HttpServerThread(threading.Thread):
     def __init__(self, handler, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.server = http.server.HTTPServer(("localhost", 7777), handler)
+        self.server = http.server.ThreadingHTTPServer(("localhost", 7777), handler)
Note for reviewers (from the PR author): without threaded webservers, HTTP/1.1's ability to leave connections open causes difficulty for the Sphinx unit tests in combination with urllib3 commit urllib3/urllib3@a80c248, a connection-pool thread-safety fix included from v2.0.0 of that library onwards. A short sketch of the failure mode follows the diff.


     def run(self):
         self.server.serve_forever(poll_interval=0.001)
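To illustrate the failure mode described in the note above, here is a minimal sketch (not part of this PR; the handler name, port, and client code are hypothetical). With `protocol_version = "HTTP/1.1"` connections stay open between requests, so a single-threaded `http.server.HTTPServer` remains busy inside the first keep-alive connection and a second connection sits unaccepted until the first one closes; `ThreadingHTTPServer` serves each connection on its own thread, which is what lets requests/urllib3 hold pooled sockets open without stalling the test suite.

```python
import http.server
import threading

import requests  # assumed available, as in the Sphinx test environment


class KeepAliveHandler(http.server.BaseHTTPRequestHandler):
    # HTTP/1.1 defaults to keep-alive: the socket stays open after each response.
    protocol_version = "HTTP/1.1"

    def do_GET(self):
        self.send_response(200, "OK")
        self.send_header("Content-Length", "0")
        self.end_headers()


# Single-threaded variant: while the first keep-alive connection is open,
# the server never returns to accept(), so a second connection would hang.
# server = http.server.HTTPServer(("localhost", 7777), KeepAliveHandler)

# Threaded variant: each connection is handled on its own thread.
server = http.server.ThreadingHTTPServer(("localhost", 7777), KeepAliveHandler)
threading.Thread(target=server.serve_forever,
                 kwargs={"poll_interval": 0.001}, daemon=True).start()

# Two sessions stand in for two linkcheck worker threads; each opens its own
# keep-alive connection. With the single-threaded server the second request
# would block until the first connection closes; with the threading server
# both are answered immediately.
with requests.Session() as first_worker, requests.Session() as second_worker:
    print(first_worker.get("http://localhost:7777/", timeout=5).status_code)
    print(second_worker.get("http://localhost:7777/", timeout=5).status_code)

server.shutdown()
```

This mirrors the two halves of the diff above: the handlers gain explicit Content-Length headers so HTTP/1.1 clients know where each response body ends, and the test server becomes threading so connections held open by one worker cannot starve the others.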