4 changes: 4 additions & 0 deletions .github/workflows/ci.yml
@@ -7,6 +7,10 @@ on:
- 'integrated/**'
- 'stl-preview-head/**'
- 'stl-preview-base/**'
pull_request:
branches-ignore:
- 'stl-preview-head/**'
- 'stl-preview-base/**'

jobs:
lint:
2 changes: 1 addition & 1 deletion .release-please-manifest.json
@@ -1,3 +1,3 @@
{
".": "2.0.0-alpha.3"
".": "2.0.0-alpha.4"
}
4 changes: 2 additions & 2 deletions .stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 35
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/replicate%2Freplicate-client-37cd8ea847eb57706035f766ca549d5b4e2111053af0656a2df9a8150421428e.yml
openapi_spec_hash: a3e4d6fd9aff6de0e4b6d8ad28cbbe05
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/replicate%2Freplicate-client-12e7ef40109b6b34f1471a638d09b79f005c8dbf7e1a8aeca9db7e37a334e8eb.yml
openapi_spec_hash: 10b0fc9094dac5d51f46bbdd5fe3de32
config_hash: 12536d2bf978a995771d076a4647c17d
20 changes: 20 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,25 @@
# Changelog

## 2.0.0-alpha.4 (2025-06-18)

Full Changelog: [v2.0.0-alpha.3...v2.0.0-alpha.4](https://github.com/replicate/replicate-python-stainless/compare/v2.0.0-alpha.3...v2.0.0-alpha.4)

### Features

* **api:** api update ([a9be2e0](https://github.com/replicate/replicate-python-stainless/commit/a9be2e087bd6f01301608322a50b321b0b01d4da))


### Bug Fixes

* **tests:** fix: tests which call HTTP endpoints directly with the example parameters ([3dfe4f7](https://github.com/replicate/replicate-python-stainless/commit/3dfe4f711c061b6197017a5b999f9db4e7f2836d))


### Chores

* **ci:** enable for pull requests ([67ffb34](https://github.com/replicate/replicate-python-stainless/commit/67ffb34adaaef43b4e4e469e5fff7ce3cdca3dcf))
* **internal:** update conftest.py ([90da407](https://github.com/replicate/replicate-python-stainless/commit/90da407a4818b21bd5a33347a3c4566189c4377d))
* **readme:** update badges ([4f54c7a](https://github.com/replicate/replicate-python-stainless/commit/4f54c7a76e5107b854e82f5266578e4f84aacc74))

## 2.0.0-alpha.3 (2025-06-17)

Full Changelog: [v2.0.0-alpha.2...v2.0.0-alpha.3](https://github.com/replicate/replicate-python-stainless/compare/v2.0.0-alpha.2...v2.0.0-alpha.3)
2 changes: 1 addition & 1 deletion README.md
@@ -1,6 +1,6 @@
# Replicate Python API library

[![PyPI version](https://img.shields.io/pypi/v/replicate.svg)](https://pypi.org/project/replicate/)
[![PyPI version](<https://img.shields.io/pypi/v/replicate.svg?label=pypi%20(stable)>)](https://pypi.org/project/replicate/)

The Replicate Python library provides convenient access to the Replicate REST API from any Python 3.8+
application. The library includes type definitions for all request params and response fields,
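For context, a minimal usage sketch of the library described above, mirroring the example input parameters used in the updated tests in this PR. This is a sketch only: the client is assumed to read `REPLICATE_API_TOKEN` from the environment, and the model owner and name are illustrative placeholders, not values from this diff.

```python
from replicate import Replicate

# Assumes REPLICATE_API_TOKEN is set in the environment.
client = Replicate()

# Create a prediction against a model. The owner/name below are
# illustrative placeholders; the input mirrors the example parameters
# used in the updated tests in this PR.
prediction = client.models.predictions.create(
    model_owner="meta",
    model_name="meta-llama-3-8b-instruct",
    input={
        "prompt": "Tell me a joke",
        "system_prompt": "You are a helpful assistant",
    },
)
print(prediction.id, prediction.status)
```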
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "replicate"
version = "2.0.0-alpha.3"
version = "2.0.0-alpha.4"
description = "The official Python library for the replicate API"
dynamic = ["readme"]
license = "Apache-2.0"
2 changes: 1 addition & 1 deletion src/replicate/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "replicate"
__version__ = "2.0.0-alpha.3" # x-release-please-version
__version__ = "2.0.0-alpha.4" # x-release-please-version
68 changes: 52 additions & 16 deletions tests/api_resources/deployments/test_predictions.py
@@ -23,7 +23,10 @@ def test_method_create(self, client: Replicate) -> None:
prediction = client.deployments.predictions.create(
deployment_owner="deployment_owner",
deployment_name="deployment_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)
assert_matches_type(Prediction, prediction, path=["response"])

@@ -33,10 +36,13 @@ def test_method_create_with_all_params(self, client: Replicate) -> None:
prediction = client.deployments.predictions.create(
deployment_owner="deployment_owner",
deployment_name="deployment_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
stream=True,
webhook="webhook",
webhook_events_filter=["start"],
webhook="https://example.com/my-webhook-handler",
webhook_events_filter=["start", "completed"],
prefer="wait=5",
)
assert_matches_type(Prediction, prediction, path=["response"])
@@ -47,7 +53,10 @@ def test_raw_response_create(self, client: Replicate) -> None:
response = client.deployments.predictions.with_raw_response.create(
deployment_owner="deployment_owner",
deployment_name="deployment_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)

assert response.is_closed is True
@@ -61,7 +70,10 @@ def test_streaming_response_create(self, client: Replicate) -> None:
with client.deployments.predictions.with_streaming_response.create(
deployment_owner="deployment_owner",
deployment_name="deployment_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -78,14 +90,20 @@ def test_path_params_create(self, client: Replicate) -> None:
client.deployments.predictions.with_raw_response.create(
deployment_owner="",
deployment_name="deployment_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)

with pytest.raises(ValueError, match=r"Expected a non-empty value for `deployment_name` but received ''"):
client.deployments.predictions.with_raw_response.create(
deployment_owner="deployment_owner",
deployment_name="",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)


@@ -98,7 +116,10 @@ async def test_method_create(self, async_client: AsyncReplicate) -> None:
prediction = await async_client.deployments.predictions.create(
deployment_owner="deployment_owner",
deployment_name="deployment_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)
assert_matches_type(Prediction, prediction, path=["response"])

@@ -108,10 +129,13 @@ async def test_method_create_with_all_params(self, async_client: AsyncReplicate)
prediction = await async_client.deployments.predictions.create(
deployment_owner="deployment_owner",
deployment_name="deployment_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
stream=True,
webhook="webhook",
webhook_events_filter=["start"],
webhook="https://example.com/my-webhook-handler",
webhook_events_filter=["start", "completed"],
prefer="wait=5",
)
assert_matches_type(Prediction, prediction, path=["response"])
@@ -122,7 +146,10 @@ async def test_raw_response_create(self, async_client: AsyncReplicate) -> None:
response = await async_client.deployments.predictions.with_raw_response.create(
deployment_owner="deployment_owner",
deployment_name="deployment_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)

assert response.is_closed is True
@@ -136,7 +163,10 @@ async def test_streaming_response_create(self, async_client: AsyncReplicate) ->
async with async_client.deployments.predictions.with_streaming_response.create(
deployment_owner="deployment_owner",
deployment_name="deployment_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -153,12 +183,18 @@ async def test_path_params_create(self, async_client: AsyncReplicate) -> None:
await async_client.deployments.predictions.with_raw_response.create(
deployment_owner="",
deployment_name="deployment_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)

with pytest.raises(ValueError, match=r"Expected a non-empty value for `deployment_name` but received ''"):
await async_client.deployments.predictions.with_raw_response.create(
deployment_owner="deployment_owner",
deployment_name="",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)
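Outside the test harness, the deployment calls exercised above correspond to roughly the following. This is a hedged sketch: the deployment owner and name are illustrative placeholders, while the remaining arguments are taken directly from `test_method_create_with_all_params` above.

```python
from replicate import Replicate

client = Replicate()  # assumes REPLICATE_API_TOKEN is set in the environment

# Placeholders for a real deployment owner/name; the other arguments
# mirror test_method_create_with_all_params above.
prediction = client.deployments.predictions.create(
    deployment_owner="acme",
    deployment_name="my-deployment",
    input={
        "prompt": "Tell me a joke",
        "system_prompt": "You are a helpful assistant",
    },
    stream=True,
    webhook="https://example.com/my-webhook-handler",
    webhook_events_filter=["start", "completed"],
    prefer="wait=5",
)
```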
68 changes: 52 additions & 16 deletions tests/api_resources/models/test_predictions.py
@@ -23,7 +23,10 @@ def test_method_create(self, client: Replicate) -> None:
prediction = client.models.predictions.create(
model_owner="model_owner",
model_name="model_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)
assert_matches_type(Prediction, prediction, path=["response"])

@@ -33,10 +36,13 @@ def test_method_create_with_all_params(self, client: Replicate) -> None:
prediction = client.models.predictions.create(
model_owner="model_owner",
model_name="model_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
stream=True,
webhook="webhook",
webhook_events_filter=["start"],
webhook="https://example.com/my-webhook-handler",
webhook_events_filter=["start", "completed"],
prefer="wait=5",
)
assert_matches_type(Prediction, prediction, path=["response"])
@@ -47,7 +53,10 @@ def test_raw_response_create(self, client: Replicate) -> None:
response = client.models.predictions.with_raw_response.create(
model_owner="model_owner",
model_name="model_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)

assert response.is_closed is True
@@ -61,7 +70,10 @@ def test_streaming_response_create(self, client: Replicate) -> None:
with client.models.predictions.with_streaming_response.create(
model_owner="model_owner",
model_name="model_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -78,14 +90,20 @@ def test_path_params_create(self, client: Replicate) -> None:
client.models.predictions.with_raw_response.create(
model_owner="",
model_name="model_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)

with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"):
client.models.predictions.with_raw_response.create(
model_owner="model_owner",
model_name="",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)


@@ -98,7 +116,10 @@ async def test_method_create(self, async_client: AsyncReplicate) -> None:
prediction = await async_client.models.predictions.create(
model_owner="model_owner",
model_name="model_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)
assert_matches_type(Prediction, prediction, path=["response"])

@@ -108,10 +129,13 @@ async def test_method_create_with_all_params(self, async_client: AsyncReplicate)
prediction = await async_client.models.predictions.create(
model_owner="model_owner",
model_name="model_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
stream=True,
webhook="webhook",
webhook_events_filter=["start"],
webhook="https://example.com/my-webhook-handler",
webhook_events_filter=["start", "completed"],
prefer="wait=5",
)
assert_matches_type(Prediction, prediction, path=["response"])
@@ -122,7 +146,10 @@ async def test_raw_response_create(self, async_client: AsyncReplicate) -> None:
response = await async_client.models.predictions.with_raw_response.create(
model_owner="model_owner",
model_name="model_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)

assert response.is_closed is True
@@ -136,7 +163,10 @@ async def test_streaming_response_create(self, async_client: AsyncReplicate) ->
async with async_client.models.predictions.with_streaming_response.create(
model_owner="model_owner",
model_name="model_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -153,12 +183,18 @@ async def test_path_params_create(self, async_client: AsyncReplicate) -> None:
await async_client.models.predictions.with_raw_response.create(
model_owner="",
model_name="model_name",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)

with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"):
await async_client.models.predictions.with_raw_response.create(
model_owner="model_owner",
model_name="",
input={},
input={
"prompt": "Tell me a joke",
"system_prompt": "You are a helpful assistant",
},
)