From 90da407a4818b21bd5a33347a3c4566189c4377d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 17 Jun 2025 04:01:26 +0000 Subject: [PATCH 1/6] chore(internal): update conftest.py --- tests/conftest.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 008b8b7..7e794cd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,5 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + from __future__ import annotations import os From 67ffb34adaaef43b4e4e469e5fff7ce3cdca3dcf Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 17 Jun 2025 06:30:30 +0000 Subject: [PATCH 2/6] chore(ci): enable for pull requests --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 091d911..dac6e70 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,6 +7,10 @@ on: - 'integrated/**' - 'stl-preview-head/**' - 'stl-preview-base/**' + pull_request: + branches-ignore: + - 'stl-preview-head/**' + - 'stl-preview-base/**' jobs: lint: From a9be2e087bd6f01301608322a50b321b0b01d4da Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 17 Jun 2025 22:16:06 +0000 Subject: [PATCH 3/6] feat(api): api update --- .github/workflows/ci.yml | 4 -- .stats.yml | 4 +- .../deployments/test_predictions.py | 68 ++++++++++++++----- .../api_resources/models/test_predictions.py | 68 ++++++++++++++----- tests/api_resources/test_predictions.py | 40 +++++------ tests/conftest.py | 2 - tests/test_client.py | 26 +++++-- 7 files changed, 146 insertions(+), 66 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dac6e70..091d911 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,10 +7,6 @@ on: - 'integrated/**' - 'stl-preview-head/**' - 'stl-preview-base/**' - pull_request: - branches-ignore: - - 'stl-preview-head/**' - - 'stl-preview-base/**' jobs: lint: diff --git a/.stats.yml b/.stats.yml index fa985e6..29aa088 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 35 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/replicate%2Freplicate-client-37cd8ea847eb57706035f766ca549d5b4e2111053af0656a2df9a8150421428e.yml -openapi_spec_hash: a3e4d6fd9aff6de0e4b6d8ad28cbbe05 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/replicate%2Freplicate-client-12e7ef40109b6b34f1471a638d09b79f005c8dbf7e1a8aeca9db7e37a334e8eb.yml +openapi_spec_hash: 10b0fc9094dac5d51f46bbdd5fe3de32 config_hash: 12536d2bf978a995771d076a4647c17d diff --git a/tests/api_resources/deployments/test_predictions.py b/tests/api_resources/deployments/test_predictions.py index ab03f7e..72ae47d 100644 --- a/tests/api_resources/deployments/test_predictions.py +++ b/tests/api_resources/deployments/test_predictions.py @@ -23,7 +23,10 @@ def test_method_create(self, client: Replicate) -> None: prediction = client.deployments.predictions.create( deployment_owner="deployment_owner", deployment_name="deployment_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -33,10 +36,13 @@ def test_method_create_with_all_params(self, client: Replicate) -> 
None: prediction = client.deployments.predictions.create( deployment_owner="deployment_owner", deployment_name="deployment_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, stream=True, - webhook="webhook", - webhook_events_filter=["start"], + webhook="https://example.com/my-webhook-handler", + webhook_events_filter=["start", "completed"], prefer="wait=5", ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -47,7 +53,10 @@ def test_raw_response_create(self, client: Replicate) -> None: response = client.deployments.predictions.with_raw_response.create( deployment_owner="deployment_owner", deployment_name="deployment_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) assert response.is_closed is True @@ -61,7 +70,10 @@ def test_streaming_response_create(self, client: Replicate) -> None: with client.deployments.predictions.with_streaming_response.create( deployment_owner="deployment_owner", deployment_name="deployment_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -78,14 +90,20 @@ def test_path_params_create(self, client: Replicate) -> None: client.deployments.predictions.with_raw_response.create( deployment_owner="", deployment_name="deployment_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `deployment_name` but received ''"): client.deployments.predictions.with_raw_response.create( deployment_owner="deployment_owner", deployment_name="", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) @@ -98,7 +116,10 @@ async def test_method_create(self, async_client: AsyncReplicate) -> None: prediction = await async_client.deployments.predictions.create( deployment_owner="deployment_owner", deployment_name="deployment_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -108,10 +129,13 @@ async def test_method_create_with_all_params(self, async_client: AsyncReplicate) prediction = await async_client.deployments.predictions.create( deployment_owner="deployment_owner", deployment_name="deployment_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, stream=True, - webhook="webhook", - webhook_events_filter=["start"], + webhook="https://example.com/my-webhook-handler", + webhook_events_filter=["start", "completed"], prefer="wait=5", ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -122,7 +146,10 @@ async def test_raw_response_create(self, async_client: AsyncReplicate) -> None: response = await async_client.deployments.predictions.with_raw_response.create( deployment_owner="deployment_owner", deployment_name="deployment_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) assert response.is_closed is True @@ -136,7 +163,10 @@ async def test_streaming_response_create(self, async_client: AsyncReplicate) -> async with async_client.deployments.predictions.with_streaming_response.create( deployment_owner="deployment_owner", 
deployment_name="deployment_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -153,12 +183,18 @@ async def test_path_params_create(self, async_client: AsyncReplicate) -> None: await async_client.deployments.predictions.with_raw_response.create( deployment_owner="", deployment_name="deployment_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `deployment_name` but received ''"): await async_client.deployments.predictions.with_raw_response.create( deployment_owner="deployment_owner", deployment_name="", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) diff --git a/tests/api_resources/models/test_predictions.py b/tests/api_resources/models/test_predictions.py index 0399b37..93e1377 100644 --- a/tests/api_resources/models/test_predictions.py +++ b/tests/api_resources/models/test_predictions.py @@ -23,7 +23,10 @@ def test_method_create(self, client: Replicate) -> None: prediction = client.models.predictions.create( model_owner="model_owner", model_name="model_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -33,10 +36,13 @@ def test_method_create_with_all_params(self, client: Replicate) -> None: prediction = client.models.predictions.create( model_owner="model_owner", model_name="model_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, stream=True, - webhook="webhook", - webhook_events_filter=["start"], + webhook="https://example.com/my-webhook-handler", + webhook_events_filter=["start", "completed"], prefer="wait=5", ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -47,7 +53,10 @@ def test_raw_response_create(self, client: Replicate) -> None: response = client.models.predictions.with_raw_response.create( model_owner="model_owner", model_name="model_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) assert response.is_closed is True @@ -61,7 +70,10 @@ def test_streaming_response_create(self, client: Replicate) -> None: with client.models.predictions.with_streaming_response.create( model_owner="model_owner", model_name="model_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -78,14 +90,20 @@ def test_path_params_create(self, client: Replicate) -> None: client.models.predictions.with_raw_response.create( model_owner="", model_name="model_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"): client.models.predictions.with_raw_response.create( model_owner="model_owner", model_name="", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) @@ -98,7 +116,10 @@ async def test_method_create(self, async_client: AsyncReplicate) -> None: prediction = await 
async_client.models.predictions.create( model_owner="model_owner", model_name="model_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -108,10 +129,13 @@ async def test_method_create_with_all_params(self, async_client: AsyncReplicate) prediction = await async_client.models.predictions.create( model_owner="model_owner", model_name="model_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, stream=True, - webhook="webhook", - webhook_events_filter=["start"], + webhook="https://example.com/my-webhook-handler", + webhook_events_filter=["start", "completed"], prefer="wait=5", ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -122,7 +146,10 @@ async def test_raw_response_create(self, async_client: AsyncReplicate) -> None: response = await async_client.models.predictions.with_raw_response.create( model_owner="model_owner", model_name="model_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) assert response.is_closed is True @@ -136,7 +163,10 @@ async def test_streaming_response_create(self, async_client: AsyncReplicate) -> async with async_client.models.predictions.with_streaming_response.create( model_owner="model_owner", model_name="model_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -153,12 +183,18 @@ async def test_path_params_create(self, async_client: AsyncReplicate) -> None: await async_client.models.predictions.with_raw_response.create( model_owner="", model_name="model_name", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"): await async_client.models.predictions.with_raw_response.create( model_owner="model_owner", model_name="", - input={}, + input={ + "prompt": "Tell me a joke", + "system_prompt": "You are a helpful assistant", + }, ) diff --git a/tests/api_resources/test_predictions.py b/tests/api_resources/test_predictions.py index da7d81b..e1839c2 100644 --- a/tests/api_resources/test_predictions.py +++ b/tests/api_resources/test_predictions.py @@ -23,8 +23,8 @@ class TestPredictions: @parametrize def test_method_create(self, client: Replicate) -> None: prediction = client.predictions.create( - input={}, - version="version", + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -32,11 +32,11 @@ def test_method_create(self, client: Replicate) -> None: @parametrize def test_method_create_with_all_params(self, client: Replicate) -> None: prediction = client.predictions.create( - input={}, - version="version", + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", stream=True, - webhook="webhook", - webhook_events_filter=["start"], + webhook="https://example.com/my-webhook-handler", + webhook_events_filter=["start", "completed"], prefer="wait=5", ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -45,8 +45,8 @@ def 
test_method_create_with_all_params(self, client: Replicate) -> None: @parametrize def test_raw_response_create(self, client: Replicate) -> None: response = client.predictions.with_raw_response.create( - input={}, - version="version", + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", ) assert response.is_closed is True @@ -58,8 +58,8 @@ def test_raw_response_create(self, client: Replicate) -> None: @parametrize def test_streaming_response_create(self, client: Replicate) -> None: with client.predictions.with_streaming_response.create( - input={}, - version="version", + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -198,8 +198,8 @@ class TestAsyncPredictions: @parametrize async def test_method_create(self, async_client: AsyncReplicate) -> None: prediction = await async_client.predictions.create( - input={}, - version="version", + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -207,11 +207,11 @@ async def test_method_create(self, async_client: AsyncReplicate) -> None: @parametrize async def test_method_create_with_all_params(self, async_client: AsyncReplicate) -> None: prediction = await async_client.predictions.create( - input={}, - version="version", + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", stream=True, - webhook="webhook", - webhook_events_filter=["start"], + webhook="https://example.com/my-webhook-handler", + webhook_events_filter=["start", "completed"], prefer="wait=5", ) assert_matches_type(Prediction, prediction, path=["response"]) @@ -220,8 +220,8 @@ async def test_method_create_with_all_params(self, async_client: AsyncReplicate) @parametrize async def test_raw_response_create(self, async_client: AsyncReplicate) -> None: response = await async_client.predictions.with_raw_response.create( - input={}, - version="version", + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", ) assert response.is_closed is True @@ -233,8 +233,8 @@ async def test_raw_response_create(self, async_client: AsyncReplicate) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncReplicate) -> None: async with async_client.predictions.with_streaming_response.create( - input={}, - version="version", + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/conftest.py b/tests/conftest.py index 7e794cd..008b8b7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,3 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- from __future__ import annotations import os diff --git a/tests/test_client.py b/tests/test_client.py index 40e8376..9c2e088 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -815,7 +815,10 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/predictions").mock(side_effect=retry_handler) - response = client.predictions.with_raw_response.create(input={}, version="version") + response = client.predictions.with_raw_response.create( + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", + ) assert response.retries_taken == failures_before_success assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @@ -840,7 +843,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/predictions").mock(side_effect=retry_handler) response = client.predictions.with_raw_response.create( - input={}, version="version", extra_headers={"x-stainless-retry-count": Omit()} + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", + extra_headers={"x-stainless-retry-count": Omit()}, ) assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @@ -865,7 +870,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/predictions").mock(side_effect=retry_handler) response = client.predictions.with_raw_response.create( - input={}, version="version", extra_headers={"x-stainless-retry-count": "42"} + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", + extra_headers={"x-stainless-retry-count": "42"}, ) assert response.http_request.headers.get("x-stainless-retry-count") == "42" @@ -1681,7 +1688,10 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/predictions").mock(side_effect=retry_handler) - response = await client.predictions.with_raw_response.create(input={}, version="version") + response = await client.predictions.with_raw_response.create( + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", + ) assert response.retries_taken == failures_before_success assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @@ -1707,7 +1717,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/predictions").mock(side_effect=retry_handler) response = await client.predictions.with_raw_response.create( - input={}, version="version", extra_headers={"x-stainless-retry-count": Omit()} + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", + extra_headers={"x-stainless-retry-count": Omit()}, ) assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @@ -1733,7 +1745,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: respx_mock.post("/predictions").mock(side_effect=retry_handler) response = await client.predictions.with_raw_response.create( - input={}, version="version", extra_headers={"x-stainless-retry-count": "42"} + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", + extra_headers={"x-stainless-retry-count": "42"}, ) assert response.http_request.headers.get("x-stainless-retry-count") == "42" From 
4f54c7a76e5107b854e82f5266578e4f84aacc74 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Jun 2025 02:06:32 +0000 Subject: [PATCH 4/6] chore(readme): update badges --- .github/workflows/ci.yml | 4 ++++ README.md | 2 +- tests/conftest.py | 2 ++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 091d911..dac6e70 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,6 +7,10 @@ on: - 'integrated/**' - 'stl-preview-head/**' - 'stl-preview-base/**' + pull_request: + branches-ignore: + - 'stl-preview-head/**' + - 'stl-preview-base/**' jobs: lint: diff --git a/README.md b/README.md index 52fc747..b79c6ba 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Replicate Python API library -[![PyPI version](https://img.shields.io/pypi/v/replicate.svg)](https://pypi.org/project/replicate/) +[![PyPI version]()](https://pypi.org/project/replicate/) The Replicate Python library provides convenient access to the Replicate REST API from any Python 3.8+ application. The library includes type definitions for all request params and response fields, diff --git a/tests/conftest.py b/tests/conftest.py index 008b8b7..7e794cd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,5 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + from __future__ import annotations import os From 3dfe4f711c061b6197017a5b999f9db4e7f2836d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Jun 2025 05:40:27 +0000 Subject: [PATCH 5/6] fix(tests): fix: tests which call HTTP endpoints directly with the example parameters --- tests/test_client.py | 93 ++++++++++++-------------------------------- 1 file changed, 24 insertions(+), 69 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index 9c2e088..2f73b4e 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -23,9 +23,7 @@ from replicate import Replicate, AsyncReplicate, APIResponseValidationError from replicate._types import Omit -from replicate._utils import maybe_transform from replicate._models import BaseModel, FinalRequestOptions -from replicate._constants import RAW_RESPONSE_HEADER from replicate._exceptions import APIStatusError, ReplicateError, APITimeoutError, APIResponseValidationError from replicate._base_client import ( DEFAULT_TIMEOUT, @@ -35,7 +33,6 @@ DefaultAsyncHttpxClient, make_request_options, ) -from replicate.types.prediction_create_params import PredictionCreateParams from .utils import update_env @@ -743,50 +740,27 @@ def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str @mock.patch("replicate._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: Replicate) -> None: respx_mock.post("/predictions").mock(side_effect=httpx.TimeoutException("Test timeout error")) with pytest.raises(APITimeoutError): - self.client.post( - "/predictions", - body=cast( - object, - maybe_transform( - dict( - input={"text": "Alice"}, - version="replicate/hello-world:5c7d5dc6dd8bf75c1acaa8565735e7986bc5b66206b55cca93cb72c9bf15ccaa", - ), - PredictionCreateParams, - ), - ), - cast_to=httpx.Response, - options={"headers": 
{RAW_RESPONSE_HEADER: "stream"}}, - ) + client.predictions.with_streaming_response.create( + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", + ).__enter__() assert _get_open_connections(self.client) == 0 @mock.patch("replicate._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: Replicate) -> None: respx_mock.post("/predictions").mock(return_value=httpx.Response(500)) with pytest.raises(APIStatusError): - self.client.post( - "/predictions", - body=cast( - object, - maybe_transform( - dict( - input={"text": "Alice"}, - version="replicate/hello-world:5c7d5dc6dd8bf75c1acaa8565735e7986bc5b66206b55cca93cb72c9bf15ccaa", - ), - PredictionCreateParams, - ), - ), - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) - + client.predictions.with_streaming_response.create( + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", + ).__enter__() assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) @@ -1615,50 +1589,31 @@ async def test_parse_retry_after_header(self, remaining_retries: int, retry_afte @mock.patch("replicate._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + async def test_retrying_timeout_errors_doesnt_leak( + self, respx_mock: MockRouter, async_client: AsyncReplicate + ) -> None: respx_mock.post("/predictions").mock(side_effect=httpx.TimeoutException("Test timeout error")) with pytest.raises(APITimeoutError): - await self.client.post( - "/predictions", - body=cast( - object, - maybe_transform( - dict( - input={"text": "Alice"}, - version="replicate/hello-world:5c7d5dc6dd8bf75c1acaa8565735e7986bc5b66206b55cca93cb72c9bf15ccaa", - ), - PredictionCreateParams, - ), - ), - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) + await async_client.predictions.with_streaming_response.create( + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", + ).__aenter__() assert _get_open_connections(self.client) == 0 @mock.patch("replicate._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + async def test_retrying_status_errors_doesnt_leak( + self, respx_mock: MockRouter, async_client: AsyncReplicate + ) -> None: respx_mock.post("/predictions").mock(return_value=httpx.Response(500)) with pytest.raises(APIStatusError): - await self.client.post( - "/predictions", - body=cast( - object, - maybe_transform( - dict( - input={"text": "Alice"}, - version="replicate/hello-world:5c7d5dc6dd8bf75c1acaa8565735e7986bc5b66206b55cca93cb72c9bf15ccaa", - ), - PredictionCreateParams, - ), - ), - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) - + await async_client.predictions.with_streaming_response.create( + input={"text": "Alice"}, + version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426", + 
).__aenter__() assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) From 869c5e5c1af8ceebe97ccedeb2d0fd022900776f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Jun 2025 05:40:45 +0000 Subject: [PATCH 6/6] release: 2.0.0-alpha.4 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 20 ++++++++++++++++++++ pyproject.toml | 2 +- src/replicate/_version.py | 2 +- 4 files changed, 23 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index be496c0..d50ed41 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.0.0-alpha.3" + ".": "2.0.0-alpha.4" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index f836f8b..8422405 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## 2.0.0-alpha.4 (2025-06-18) + +Full Changelog: [v2.0.0-alpha.3...v2.0.0-alpha.4](https://github.com/replicate/replicate-python-stainless/compare/v2.0.0-alpha.3...v2.0.0-alpha.4) + +### Features + +* **api:** api update ([a9be2e0](https://github.com/replicate/replicate-python-stainless/commit/a9be2e087bd6f01301608322a50b321b0b01d4da)) + + +### Bug Fixes + +* **tests:** fix: tests which call HTTP endpoints directly with the example parameters ([3dfe4f7](https://github.com/replicate/replicate-python-stainless/commit/3dfe4f711c061b6197017a5b999f9db4e7f2836d)) + + +### Chores + +* **ci:** enable for pull requests ([67ffb34](https://github.com/replicate/replicate-python-stainless/commit/67ffb34adaaef43b4e4e469e5fff7ce3cdca3dcf)) +* **internal:** update conftest.py ([90da407](https://github.com/replicate/replicate-python-stainless/commit/90da407a4818b21bd5a33347a3c4566189c4377d)) +* **readme:** update badges ([4f54c7a](https://github.com/replicate/replicate-python-stainless/commit/4f54c7a76e5107b854e82f5266578e4f84aacc74)) + ## 2.0.0-alpha.3 (2025-06-17) Full Changelog: [v2.0.0-alpha.2...v2.0.0-alpha.3](https://github.com/replicate/replicate-python-stainless/compare/v2.0.0-alpha.2...v2.0.0-alpha.3) diff --git a/pyproject.toml b/pyproject.toml index 2cddc95..8f9560a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "replicate" -version = "2.0.0-alpha.3" +version = "2.0.0-alpha.4" description = "The official Python library for the replicate API" dynamic = ["readme"] license = "Apache-2.0" diff --git a/src/replicate/_version.py b/src/replicate/_version.py index 877de5d..ffe5082 100644 --- a/src/replicate/_version.py +++ b/src/replicate/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "replicate" -__version__ = "2.0.0-alpha.3" # x-release-please-version +__version__ = "2.0.0-alpha.4" # x-release-please-version
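
The parameter updates in PATCH 3 and the test rewrite in PATCH 5 map onto the public client surface roughly as shown below. This is a minimal sketch, not part of the patches: it assumes the Replicate client picks up its API token from the environment, and the `replicate/hello-world` version string, webhook URL, and owner/name placeholders are simply the example values the test suite now uses, not required inputs.

```python
from replicate import Replicate

client = Replicate()  # assumed to read the API token from the environment

# Versioned prediction with the realistic webhook parameters the tests now pass
# (see tests/api_resources/test_predictions.py in PATCH 3)
prediction = client.predictions.create(
    input={"text": "Alice"},
    version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426",
    webhook="https://example.com/my-webhook-handler",
    webhook_events_filter=["start", "completed"],
)

# Model-scoped prediction with the prompt-style input from the updated tests
# ("model_owner"/"model_name" are placeholder values, as in the test suite)
prediction = client.models.predictions.create(
    model_owner="model_owner",
    model_name="model_name",
    input={
        "prompt": "Tell me a joke",
        "system_prompt": "You are a helpful assistant",
    },
)

# Streaming-response form used by the connection-leak tests rewritten in PATCH 5
with client.predictions.with_streaming_response.create(
    input={"text": "Alice"},
    version="replicate/hello-world:9dcd6d78e7c6560c340d916fe32e9f24aabfa331e5cce95fe31f77fb03121426",
) as response:
    # the underlying connection is released when the block exits,
    # which is what the "doesnt_leak" tests now verify
    assert not response.is_closed
```

Routing the retry and leak tests through `with_streaming_response` in PATCH 5 keeps them on the same public code path as the resource tests, which is also why the low-level helpers (`maybe_transform`, `RAW_RESPONSE_HEADER`, `PredictionCreateParams`) are dropped from the imports in tests/test_client.py.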