Skip to content

Commit 1283607

Browse files
committed
fix: Optimize test performance and resolve Windows parallelization issues
- Increase CI timeout from 10 to 15 minutes for Windows compatibility
- Add integration test markers to avoid parallelization conflicts
- Split Windows test execution: run integration tests sequentially, others in parallel
- Optimize server startup logic with better timeouts and retry intervals
- Add socket timeouts and improved error handling for server startup
- This should resolve the 7+ minute test hangs on Windows CI
1 parent 4abb5c2 commit 1283607

File tree

3 files changed

+58

-19

lines changed

.github/workflows/shared.yml

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ jobs:
2828

2929
test:
3030
runs-on: ${{ matrix.os }}
31-
timeout-minutes: 10
31+
timeout-minutes: 15
3232
continue-on-error: true
3333
strategy:
3434
matrix:
@@ -48,7 +48,15 @@ jobs:
4848
run: uv sync --frozen --all-extras --python ${{ matrix.python-version }}
4949

5050
- name: Run pytest
51-
run: uv run --frozen --no-sync pytest
51+
run: |
52+
if [ "${{ matrix.os }}" = "windows-latest" ]; then
53+
# Run integration tests without parallelization on Windows to avoid multiprocessing issues
54+
uv run --frozen --no-sync pytest -m "not integration" --numprocesses auto
55+
uv run --frozen --no-sync pytest -m integration --numprocesses 1
56+
else
57+
uv run --frozen --no-sync pytest
58+
fi
59+
shell: bash
5260

5361
# This must run last as it modifies the environment!
5462
- name: Run pytest with lowest versions

pyproject.toml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -120,6 +120,11 @@ addopts = """
120120
--capture=fd
121121
--numprocesses auto
122122
"""
123+
# Disable parallelization for integration tests that spawn subprocesses
124+
# This prevents Windows issues with multiprocessing + subprocess conflicts
125+
markers = [
126+
"integration: marks tests as integration tests (may run without parallelization)",
127+
]
123128
filterwarnings = [
124129
"error",
125130
# This should be fixed on Uvicorn's side.

tests/server/fastmcp/test_integration.py

Lines changed: 43 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,9 @@
55
single-feature servers across different transports (SSE and StreamableHTTP).
66
"""
77

8+
# Mark all tests in this file as integration tests
9+
pytestmark = pytest.mark.integration
10+
811
import json
912
import multiprocessing
1013
import socket
@@ -89,7 +92,9 @@ def run_server_with_transport(module_name: str, port: int, transport: str) -> No
8992
import os
9093

9194
# Add examples/snippets to Python path for multiprocessing context
92-
snippets_path = os.path.join(os.path.dirname(__file__), "..", "..", "..", "examples", "snippets")
95+
snippets_path = os.path.join(
96+
os.path.dirname(__file__), "..", "..", "..", "examples", "snippets"
97+
)
9398
sys.path.insert(0, os.path.abspath(snippets_path))
9499

95100
# Import the servers module in the multiprocessing context
@@ -138,7 +143,9 @@ def run_server_with_transport(module_name: str, port: int, transport: str) -> No
138143
else:
139144
raise ValueError(f"Invalid transport for test server: {transport}")
140145

141-
server = uvicorn.Server(config=uvicorn.Config(app=app, host="127.0.0.1", port=port, log_level="error"))
146+
server = uvicorn.Server(
147+
config=uvicorn.Config(app=app, host="127.0.0.1", port=port, log_level="error")
148+
)
142149
print(f"Starting {transport} server on port {port}")
143150
server.run()
144151

@@ -163,19 +170,24 @@ def server_transport(request, server_port: int) -> Generator[str, None, None]:
163170
)
164171
proc.start()
165172

166-
# Wait for server to be running
167-
max_attempts = 20
173+
# Wait for server to be running - optimized for faster startup
174+
max_attempts = 30 # Increased attempts for Windows
168175
attempt = 0
169176
while attempt < max_attempts:
170177
try:
171178
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
179+
s.settimeout(1.0) # Add socket timeout
172180
s.connect(("127.0.0.1", server_port))
173181
break
174-
except ConnectionRefusedError:
175-
time.sleep(0.1)
182+
except (ConnectionRefusedError, OSError):
183+
# Use shorter initial delays, then increase
184+
delay = 0.05 if attempt < 10 else 0.1
185+
time.sleep(delay)
176186
attempt += 1
177187
else:
178-
raise RuntimeError(f"Server failed to start after {max_attempts} attempts")
188+
raise RuntimeError(
189+
f"Server failed to start after {max_attempts} attempts (port {server_port})"
190+
)
179191

180192
yield transport
181193

@@ -346,10 +358,14 @@ async def test_basic_prompts(server_transport: str, server_url: str) -> None:
346358

347359
# Test review_code prompt
348360
prompts = await session.list_prompts()
349-
review_prompt = next((p for p in prompts.prompts if p.name == "review_code"), None)
361+
review_prompt = next(
362+
(p for p in prompts.prompts if p.name == "review_code"), None
363+
)
350364
assert review_prompt is not None
351365

352-
prompt_result = await session.get_prompt("review_code", {"code": "def hello():\n print('Hello')"})
366+
prompt_result = await session.get_prompt(
367+
"review_code", {"code": "def hello():\n print('Hello')"}
368+
)
353369
assert isinstance(prompt_result, GetPromptResult)
354370
assert len(prompt_result.messages) == 1
355371
assert isinstance(prompt_result.messages[0].content, TextContent)
@@ -405,16 +421,18 @@ async def test_tool_progress(server_transport: str, server_url: str) -> None:
405421
assert result.capabilities.tools is not None
406422

407423
# Test long_running_task tool that reports progress
408-
tool_result = await session.call_tool("long_running_task", {"task_name": "test", "steps": 3})
424+
tool_result = await session.call_tool(
425+
"long_running_task", {"task_name": "test", "steps": 3}
426+
)
409427
assert len(tool_result.content) == 1
410428
assert isinstance(tool_result.content[0], TextContent)
411429
assert "Task 'test' completed" in tool_result.content[0].text
412430

413431
# Verify that progress notifications or log messages were sent
414432
# Progress can come through either progress notifications or log messages
415-
total_notifications = len(notification_collector.progress_notifications) + len(
416-
notification_collector.log_messages
417-
)
433+
total_notifications = len(
434+
notification_collector.progress_notifications
435+
) + len(notification_collector.log_messages)
418436
assert total_notifications > 0
419437

420438

@@ -435,7 +453,9 @@ async def test_sampling(server_transport: str, server_url: str) -> None:
435453

436454
async with client_cm as client_streams:
437455
read_stream, write_stream = unpack_streams(client_streams)
438-
async with ClientSession(read_stream, write_stream, sampling_callback=sampling_callback) as session:
456+
async with ClientSession(
457+
read_stream, write_stream, sampling_callback=sampling_callback
458+
) as session:
439459
# Test initialization
440460
result = await session.initialize()
441461
assert isinstance(result, InitializeResult)
@@ -466,7 +486,9 @@ async def test_elicitation(server_transport: str, server_url: str) -> None:
466486

467487
async with client_cm as client_streams:
468488
read_stream, write_stream = unpack_streams(client_streams)
469-
async with ClientSession(read_stream, write_stream, elicitation_callback=elicitation_callback) as session:
489+
async with ClientSession(
490+
read_stream, write_stream, elicitation_callback=elicitation_callback
491+
) as session:
470492
# Test initialization
471493
result = await session.initialize()
472494
assert isinstance(result, InitializeResult)
@@ -512,7 +534,9 @@ async def test_completion(server_transport: str, server_url: str) -> None:
512534
assert len(prompts.prompts) > 0
513535

514536
# Test getting a prompt
515-
prompt_result = await session.get_prompt("review_code", {"language": "python", "code": "def test(): pass"})
537+
prompt_result = await session.get_prompt(
538+
"review_code", {"language": "python", "code": "def test(): pass"}
539+
)
516540
assert len(prompt_result.messages) > 0
517541

518542

@@ -624,7 +648,9 @@ async def test_structured_output(server_transport: str, server_url: str) -> None
624648
assert result.serverInfo.name == "Structured Output Example"
625649

626650
# Test get_weather tool
627-
weather_result = await session.call_tool("get_weather", {"city": "New York"})
651+
weather_result = await session.call_tool(
652+
"get_weather", {"city": "New York"}
653+
)
628654
assert len(weather_result.content) == 1
629655
assert isinstance(weather_result.content[0], TextContent)
630656

0 commit comments

Comments (0)