diff --git a/tests/test_build_linkcheck.py b/tests/test_build_linkcheck.py
index 70b992a6fa9..9b08854b4ff 100644
--- a/tests/test_build_linkcheck.py
+++ b/tests/test_build_linkcheck.py
@@ -16,7 +16,12 @@
 import pytest
 
-from sphinx.builders.linkcheck import HyperlinkAvailabilityCheckWorker, RateLimit
+from sphinx.builders.linkcheck import (
+    CheckRequest,
+    Hyperlink,
+    HyperlinkAvailabilityCheckWorker,
+    RateLimit,
+)
 from sphinx.testing.util import strip_escseq
 from sphinx.util import requests
 from sphinx.util.console import strip_colors
 
@@ -764,6 +769,47 @@ def test_limit_rate_bails_out_after_waiting_max_time(app):
     assert next_check is None
 
 
+@mock.patch('sphinx.util.requests.requests.Session.get_adapter')
+def test_connection_contention(get_adapter, app, capsys):
+    # Create a shared, but limited-size, connection pool
+    import requests
+    get_adapter.return_value = requests.adapters.HTTPAdapter(pool_maxsize=1)
+
+    # Set an upper-bound on socket timeouts globally
+    import socket
+    socket.setdefaulttimeout(5)
+
+    # Place a workload into the linkcheck queue
+    link_count = 10
+    rqueue, wqueue = Queue(), Queue()
+    for _ in range(link_count):
+        wqueue.put(CheckRequest(0, Hyperlink("http://localhost:7777", "test", "test.rst", 1)))
+
+    # Create parallel consumer threads
+    with http_server(make_redirect_handler(support_head=True)):
+        begin, checked = time.time(), []
+        threads = [
+            HyperlinkAvailabilityCheckWorker(
+                config=app.config,
+                rqueue=rqueue,
+                wqueue=wqueue,
+                rate_limits={},
+            )
+            for _ in range(10)
+        ]
+        for thread in threads:
+            thread.start()
+        while time.time() < begin + 5 and len(checked) < link_count:
+            checked.append(rqueue.get(timeout=5))
+        for thread in threads:
+            thread.join(timeout=0)
+
+    # Ensure that all items were consumed within the time limit
+    _, stderr = capsys.readouterr()
+    assert len(checked) == link_count
+    assert "TimeoutError" not in stderr
+
+
 class ConnectionResetHandler(http.server.BaseHTTPRequestHandler):
     protocol_version = "HTTP/1.1"