Skip to content

Commit 3c42e2b

Browse files
vdusek and claude committed
test: address PR review feedback
- Parametrize tests with multiple assertions into single-assert-per-case
- Remove redundant `service_locator._configuration = None` (handled by autouse fixture)
- Inject mocked API clients to avoid accessing private `_api_client` attribute
- Move `drop()` calls into `finally` blocks for cleanup on test failure
- Always include `is_running_in_ipython` flag in `get_system_info` (True/False)
- Remove duplicate assert in `test_create_same_hmac`
- Remove unnecessary `type: ignore` comment

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 6048ac8 commit 3c42e2b

File tree

11 files changed

+184
-150
lines changed

11 files changed

+184
-150
lines changed

src/apify/_utils.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,7 @@ def get_system_info() -> dict:
2121
'os': sys.platform,
2222
}
2323

24-
if is_running_in_ipython():
25-
system_info['is_running_in_ipython'] = True
24+
system_info['is_running_in_ipython'] = is_running_in_ipython()
2625

2726
return system_info
2827

tests/integration/test_dataset.py

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -82,16 +82,17 @@ async def test_same_references_in_named_dataset(apify_token: str, monkeypatch: p
8282

8383
async with Actor:
8484
dataset_by_name_1 = await Actor.open_dataset(name=dataset_name)
85-
dataset_by_name_2 = await Actor.open_dataset(name=dataset_name)
86-
assert dataset_by_name_1 is dataset_by_name_2
87-
88-
dataset_1_metadata = await dataset_by_name_1.get_metadata()
89-
dataset_by_id_1 = await Actor.open_dataset(id=dataset_1_metadata.id)
90-
dataset_by_id_2 = await Actor.open_dataset(id=dataset_1_metadata.id)
91-
assert dataset_by_id_1 is dataset_by_name_1
92-
assert dataset_by_id_2 is dataset_by_id_1
93-
94-
await dataset_by_name_1.drop()
85+
try:
86+
dataset_by_name_2 = await Actor.open_dataset(name=dataset_name)
87+
assert dataset_by_name_1 is dataset_by_name_2
88+
89+
dataset_1_metadata = await dataset_by_name_1.get_metadata()
90+
dataset_by_id_1 = await Actor.open_dataset(id=dataset_1_metadata.id)
91+
dataset_by_id_2 = await Actor.open_dataset(id=dataset_1_metadata.id)
92+
assert dataset_by_id_1 is dataset_by_name_1
93+
assert dataset_by_id_2 is dataset_by_id_1
94+
finally:
95+
await dataset_by_name_1.drop()
9596

9697

9798
async def test_force_cloud(

tests/integration/test_key_value_store.py

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -46,16 +46,17 @@ async def test_same_references_in_named_kvs(apify_token: str, monkeypatch: pytes
4646

4747
async with Actor:
4848
kvs_by_name_1 = await Actor.open_key_value_store(name=kvs_name)
49-
kvs_by_name_2 = await Actor.open_key_value_store(name=kvs_name)
50-
assert kvs_by_name_1 is kvs_by_name_2
51-
52-
kvs_1_metadata = await kvs_by_name_1.get_metadata()
53-
kvs_by_id_1 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
54-
kvs_by_id_2 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
55-
assert kvs_by_id_1 is kvs_by_name_1
56-
assert kvs_by_id_2 is kvs_by_id_1
57-
58-
await kvs_by_name_1.drop()
49+
try:
50+
kvs_by_name_2 = await Actor.open_key_value_store(name=kvs_name)
51+
assert kvs_by_name_1 is kvs_by_name_2
52+
53+
kvs_1_metadata = await kvs_by_name_1.get_metadata()
54+
kvs_by_id_1 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
55+
kvs_by_id_2 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
56+
assert kvs_by_id_1 is kvs_by_name_1
57+
assert kvs_by_id_2 is kvs_by_id_1
58+
finally:
59+
await kvs_by_name_1.drop()
5960

6061

6162
async def test_set_and_get_value_in_same_run(key_value_store_apify: KeyValueStore) -> None:

tests/integration/test_request_queue.py

Lines changed: 78 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -1003,13 +1003,16 @@ async def default_handler(context: BasicCrawlingContext) -> None:
10031003
# Make sure all requests were handled.
10041004
assert crawler.statistics.state.requests_finished == requests
10051005

1006-
# Check the request queue stats
1007-
await asyncio.sleep(10) # Wait to be sure that metadata are updated
1006+
try:
1007+
# Check the request queue stats
1008+
await asyncio.sleep(10) # Wait to be sure that metadata are updated
1009+
1010+
metadata = cast('ApifyRequestQueueMetadata', await rq.get_metadata())
1011+
Actor.log.info(f'{metadata.stats=}')
1012+
assert metadata.stats.write_count == requests * expected_write_count_per_request
10081013

1009-
metadata = cast('ApifyRequestQueueMetadata', await rq.get_metadata())
1010-
Actor.log.info(f'{metadata.stats=}')
1011-
assert metadata.stats.write_count == requests * expected_write_count_per_request
1012-
await rq.drop()
1014+
finally:
1015+
await rq.drop()
10131016

10141017

10151018
async def test_cache_initialization(apify_token: str, monkeypatch: pytest.MonkeyPatch) -> None:
@@ -1217,16 +1220,17 @@ async def test_same_references_in_named_rq(apify_token: str, monkeypatch: pytest
12171220

12181221
async with Actor:
12191222
rq_by_name_1 = await Actor.open_request_queue(name=rq_name)
1220-
rq_by_name_2 = await Actor.open_request_queue(name=rq_name)
1221-
assert rq_by_name_1 is rq_by_name_2
1222-
1223-
rq_1_metadata = await rq_by_name_1.get_metadata()
1224-
rq_by_id_1 = await Actor.open_request_queue(id=rq_1_metadata.id)
1225-
rq_by_id_2 = await Actor.open_request_queue(id=rq_1_metadata.id)
1226-
assert rq_by_id_1 is rq_by_name_1
1227-
assert rq_by_id_2 is rq_by_id_1
1228-
1229-
await rq_by_name_1.drop()
1223+
try:
1224+
rq_by_name_2 = await Actor.open_request_queue(name=rq_name)
1225+
assert rq_by_name_1 is rq_by_name_2
1226+
1227+
rq_1_metadata = await rq_by_name_1.get_metadata()
1228+
rq_by_id_1 = await Actor.open_request_queue(id=rq_1_metadata.id)
1229+
rq_by_id_2 = await Actor.open_request_queue(id=rq_1_metadata.id)
1230+
assert rq_by_id_1 is rq_by_name_1
1231+
assert rq_by_id_2 is rq_by_id_1
1232+
finally:
1233+
await rq_by_name_1.drop()
12301234

12311235

12321236
async def test_request_queue_deduplication(
@@ -1330,63 +1334,63 @@ async def test_concurrent_processing_simulation(apify_token: str, monkeypatch: p
13301334
)
13311335
async with Actor:
13321336
rq = await Actor.open_request_queue()
1337+
try:
1338+
for i in range(20):
1339+
await rq.add_request(f'https://example.com/concurrent/{i}')
13331340

1334-
for i in range(20):
1335-
await rq.add_request(f'https://example.com/concurrent/{i}')
1336-
1337-
total_count = await rq.get_total_count()
1338-
assert total_count == 20
1341+
total_count = await rq.get_total_count()
1342+
assert total_count == 20
13391343

1340-
async def worker() -> int:
1341-
processed = 0
1342-
request_counter = 0
1344+
async def worker() -> int:
1345+
processed = 0
1346+
request_counter = 0
13431347

1344-
while request := await rq.fetch_next_request():
1345-
await asyncio.sleep(0.01)
1348+
while request := await rq.fetch_next_request():
1349+
await asyncio.sleep(0.01)
13461350

1347-
if request_counter % 5 == 0 and request_counter > 0:
1348-
await rq.reclaim_request(request)
1349-
else:
1350-
await rq.mark_request_as_handled(request)
1351-
processed += 1
1351+
if request_counter % 5 == 0 and request_counter > 0:
1352+
await rq.reclaim_request(request)
1353+
else:
1354+
await rq.mark_request_as_handled(request)
1355+
processed += 1
13521356

1353-
request_counter += 1
1357+
request_counter += 1
13541358

1355-
return processed
1359+
return processed
13561360

1357-
workers = [worker() for _ in range(3)]
1358-
results = await asyncio.gather(*workers)
1361+
workers = [worker() for _ in range(3)]
1362+
results = await asyncio.gather(*workers)
13591363

1360-
total_processed = sum(results)
1364+
total_processed = sum(results)
13611365

1362-
assert total_processed > 0
1363-
assert len(results) == 3
1366+
assert total_processed > 0
1367+
assert len(results) == 3
13641368

1365-
handled_after_workers = await rq.get_handled_count()
1366-
assert handled_after_workers == total_processed
1369+
handled_after_workers = await rq.get_handled_count()
1370+
assert handled_after_workers == total_processed
13671371

1368-
total_after_workers = await rq.get_total_count()
1369-
assert total_after_workers == 20
1372+
total_after_workers = await rq.get_total_count()
1373+
assert total_after_workers == 20
13701374

1371-
remaining_count = 0
1372-
while not await rq.is_finished():
1373-
request = await rq.fetch_next_request()
1374-
if request:
1375-
remaining_count += 1
1376-
await rq.mark_request_as_handled(request)
1377-
else:
1378-
break
1379-
1380-
final_handled = await rq.get_handled_count()
1381-
final_total = await rq.get_total_count()
1382-
assert final_handled == 20
1383-
assert final_total == 20
1384-
assert total_processed + remaining_count == 20
1375+
remaining_count = 0
1376+
while not await rq.is_finished():
1377+
request = await rq.fetch_next_request()
1378+
if request:
1379+
remaining_count += 1
1380+
await rq.mark_request_as_handled(request)
1381+
else:
1382+
break
13851383

1386-
is_finished = await rq.is_finished()
1387-
assert is_finished is True
1384+
final_handled = await rq.get_handled_count()
1385+
final_total = await rq.get_total_count()
1386+
assert final_handled == 20
1387+
assert final_total == 20
1388+
assert total_processed + remaining_count == 20
13881389

1389-
await rq.drop()
1390+
is_finished = await rq.is_finished()
1391+
assert is_finished is True
1392+
finally:
1393+
await rq.drop()
13901394

13911395

13921396
async def test_rq_isolation(apify_token: str, monkeypatch: pytest.MonkeyPatch) -> None:
@@ -1399,22 +1403,22 @@ async def test_rq_isolation(apify_token: str, monkeypatch: pytest.MonkeyPatch) -
13991403
async with Actor:
14001404
rq1 = await Actor.open_request_queue(name=rq_name_1)
14011405
rq2 = await Actor.open_request_queue(name=rq_name_2)
1406+
try:
1407+
assert rq1 is not rq2
14021408

1403-
assert rq1 is not rq2
1404-
1405-
await rq1.add_request('https://example.com/queue1-request')
1406-
await rq2.add_request('https://example.com/queue2-request')
1407-
1408-
req1 = await rq1.fetch_next_request()
1409-
req2 = await rq2.fetch_next_request()
1409+
await rq1.add_request('https://example.com/queue1-request')
1410+
await rq2.add_request('https://example.com/queue2-request')
14101411

1411-
assert req1 is not None
1412-
assert 'queue1' in req1.url
1413-
assert req2 is not None
1414-
assert 'queue2' in req2.url
1412+
req1 = await rq1.fetch_next_request()
1413+
req2 = await rq2.fetch_next_request()
14151414

1416-
await rq1.mark_request_as_handled(req1)
1417-
await rq2.mark_request_as_handled(req2)
1415+
assert req1 is not None
1416+
assert 'queue1' in req1.url
1417+
assert req2 is not None
1418+
assert 'queue2' in req2.url
14181419

1419-
await rq1.drop()
1420-
await rq2.drop()
1420+
await rq1.mark_request_as_handled(req1)
1421+
await rq2.mark_request_as_handled(req2)
1422+
finally:
1423+
await rq1.drop()
1424+
await rq2.drop()

tests/unit/actor/test_actor_helpers.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -296,7 +296,7 @@ async def test_remote_method_with_invalid_timeout(
296296
async with Actor:
297297
actor_method = getattr(Actor, actor_method_name)
298298
with pytest.raises(ValueError, match='Invalid timeout'):
299-
await actor_method(entity_id, timeout='invalid') # type: ignore[arg-type]
299+
await actor_method(entity_id, timeout='invalid')
300300

301301

302302
async def test_abort_with_status_message(

tests/unit/storage_clients/test_apify_dataset_client.py

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -8,24 +8,30 @@
88
from apify.storage_clients._apify._dataset_client import ApifyDatasetClient
99

1010

11-
def _make_dataset_client() -> ApifyDatasetClient:
11+
def _make_dataset_client(api_client: AsyncMock | None = None) -> tuple[ApifyDatasetClient, AsyncMock]:
1212
"""Create an ApifyDatasetClient with a mocked API client."""
13-
api_client = AsyncMock()
14-
return ApifyDatasetClient(api_client=api_client, api_public_base_url='', lock=asyncio.Lock())
13+
if api_client is None:
14+
api_client = AsyncMock()
15+
16+
return ApifyDatasetClient(
17+
api_client=api_client,
18+
api_public_base_url='',
19+
lock=asyncio.Lock(),
20+
), api_client
1521

1622

1723
async def test_purge_raises_not_implemented() -> None:
1824
"""Test that purge() raises NotImplementedError."""
19-
client = _make_dataset_client()
25+
client, _ = _make_dataset_client()
2026
with pytest.raises(NotImplementedError, match='Purging datasets is not supported'):
2127
await client.purge()
2228

2329

2430
async def test_drop_calls_api_delete() -> None:
2531
"""Test that drop() delegates to the API client."""
26-
client = _make_dataset_client()
32+
client, api_client = _make_dataset_client()
2733
await client.drop()
28-
client._api_client.delete.assert_awaited_once() # ty: ignore[possibly-missing-attribute]
34+
api_client.delete.assert_awaited_once()
2935

3036

3137
async def test_deprecated_api_public_base_url() -> None:

0 commit comments

Comments (0)