From 064608d6ad5e0406b06d4b5fec0d48f7ea56e208 Mon Sep 17 00:00:00 2001 From: speakeasybot Date: Wed, 22 Oct 2025 02:11:39 +0000 Subject: [PATCH 1/2] ## Python SDK Changes Detected: * `glean.client.chat.create()`: * `request.messages.[].agent_config.use_image_generation` **Added** * `response.messages.[].agent_config.use_image_generation` **Added** * `glean.client.chat.retrieve()`: `response.chat_result.chat.messages.[].agent_config.use_image_generation` **Added** * `glean.client.chat.create_stream()`: * `request.messages.[].agent_config.use_image_generation` **Added** --- .speakeasy/gen.lock | 16 ++-- .speakeasy/gen.yaml | 4 +- .speakeasy/glean-merged-spec.yaml | 87 ++++++++++--------- .speakeasy/workflow.lock | 14 +-- RELEASES.md | 12 ++- docs/models/agentconfig.md | 3 +- docs/models/chatrequest.md | 6 +- docs/sdks/clientchat/README.md | 4 +- pyproject.toml | 2 +- speakeasyusagegen/.speakeasy/logs/naming.log | 4 +- src/glean/api_client/_version.py | 6 +- src/glean/api_client/client_chat.py | 64 +++++++------- src/glean/api_client/models/agentconfig.py | 7 ++ src/glean/api_client/models/chatrequest.py | 14 +-- .../pathpostrestapiv1createcollection.go | 20 ++--- .../sdk/models/components/agentconfig.go | 9 ++ .../sdk/models/components/chatrequest.go | 37 ++++---- 17 files changed, 176 insertions(+), 133 deletions(-) diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock index d6b1ff0a..2cadafa8 100644 --- a/.speakeasy/gen.lock +++ b/.speakeasy/gen.lock @@ -1,12 +1,12 @@ lockVersion: 2.0.0 id: 3e3290ca-0ee8-4981-b1bc-14536048fa63 management: - docChecksum: 5a4cd8de1e6ca5f701ac27e244d09799 + docChecksum: 30752ad325475357749711c885ecb81f docVersion: 0.9.0 - speakeasyVersion: 1.636.3 - generationVersion: 2.723.11 - releaseVersion: 0.10.3 - configChecksum: e07c2e6f4cf5159e031335490e5f2f34 + speakeasyVersion: 1.638.0 + generationVersion: 2.728.0 + releaseVersion: 0.11.0 + configChecksum: 07f0a9ba148add2a46e7e1b2f680ddab repoURL: https://github.com/gleanwork/api-client-python.git installationURL: https://github.com/gleanwork/api-client-python.git published: true @@ -16,7 +16,7 @@ features: additionalProperties: 1.0.1 configurableModuleName: 0.2.0 constsAndDefaults: 1.0.5 - core: 5.22.1 + core: 5.23.0 defaultEnabledRetries: 0.2.0 deprecations: 3.0.2 devContainers: 3.0.0 @@ -2742,6 +2742,4 @@ generatedTests: editcollection: "2025-06-12T19:13:52-04:00" createshortcut: "2025-06-12T19:13:52-04:00" updateshortcut: "2025-06-12T19:13:52-04:00" -releaseNotes: | - ## Python SDK Changes Detected: - * `glean.client.messages.retrieve()`: `request.datasource` **Changed** **Breaking** :warning: +releaseNotes: "## Python SDK Changes Detected:\n* `glean.client.chat.create()`: \n * `request.messages.[].agent_config.use_image_generation` **Added**\n * `response.messages.[].agent_config.use_image_generation` **Added**\n* `glean.client.chat.retrieve()`: `response.chat_result.chat.messages.[].agent_config.use_image_generation` **Added**\n* `glean.client.chat.create_stream()`: \n * `request.messages.[].agent_config.use_image_generation` **Added**\n" diff --git a/.speakeasy/gen.yaml b/.speakeasy/gen.yaml index df6d7da2..27bea97a 100644 --- a/.speakeasy/gen.yaml +++ b/.speakeasy/gen.yaml @@ -20,6 +20,8 @@ generation: oAuth2ClientCredentialsEnabled: false oAuth2PasswordEnabled: false hoistGlobalSecurity: true + schemas: + allOfMergeStrategy: shallowMerge mockServer: disabled: false tests: @@ -27,7 +29,7 @@ generation: generateNewTests: true skipResponseBodyAssertions: true python: - version: 0.10.3 + version: 0.11.0 
additionalDependencies: dev: {} main: {} diff --git a/.speakeasy/glean-merged-spec.yaml b/.speakeasy/glean-merged-spec.yaml index aea85d72..1197d74e 100644 --- a/.speakeasy/glean-merged-spec.yaml +++ b/.speakeasy/glean-merged-spec.yaml @@ -7047,6 +7047,9 @@ components: enum: - DEFAULT - QUICK + useImageGeneration: + type: boolean + description: Whether the agent should create an image. ChatFileStatus: type: string description: Current status of the file. @@ -7427,6 +7430,19 @@ components: deprecated: true type: boolean description: Signals there are additional response fragments incoming. + ChatRequestBase: + required: + - messages + description: The minimal set of fields that form a chat request. + properties: + messages: + type: array + description: A list of chat messages, from most recent to least recent. At least one message must specify a USER author. + items: + $ref: "#/components/schemas/ChatMessage" + sessionInfo: + description: Optional object for tracking the session used by the client and for debugging purposes. + $ref: "#/components/schemas/SessionInfo" ChatRestrictionFilters: allOf: - $ref: "#/components/schemas/RestrictionFilters" @@ -7441,45 +7457,38 @@ components: items: type: string ChatRequest: - required: - - messages - properties: - saveChat: - type: boolean - description: Save the current interaction as a Chat for the user to access and potentially continue later. - chatId: - type: string - description: The id of the Chat that context should be retrieved from and messages added to. An empty id starts a new Chat, and the Chat is saved if saveChat is true. - messages: - type: array - description: A list of chat messages, from most recent to least recent. At least one message must specify a USER author. - items: - $ref: "#/components/schemas/ChatMessage" - agentConfig: - $ref: "#/components/schemas/AgentConfig" - description: Describes the agent that will execute the request. - inclusions: - $ref: "#/components/schemas/ChatRestrictionFilters" - description: A list of filters which only allows chat to access certain content. - exclusions: - $ref: "#/components/schemas/ChatRestrictionFilters" - description: A list of filters which disallows chat from accessing certain content. If content is in both inclusions and exclusions, it'll be excluded. - timeoutMillis: - type: integer - description: Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer. - example: 30000 - sessionInfo: - description: Optional object for tracking the session used by the client and for debugging purposes. - $ref: "#/components/schemas/SessionInfo" - applicationId: - type: string - description: The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used. - agentId: - type: string - description: The ID of the Agent that should process this chat request. Only Agents with trigger set to 'User chat message' are invokable through this API. If not specified, the default chat experience will be used. - stream: - type: boolean - description: If set, response lines will be streamed one-by-one as they become available. Each will be a ChatResponse, formatted as JSON, and separated by a new line. If false, the entire response will be returned at once. 
Note that if this is set and the model being used does not support streaming, the model's response will not be streamed, but other messages from the endpoint still will be. + allOf: + - $ref: "#/components/schemas/ChatRequestBase" + - type: object + properties: + saveChat: + type: boolean + description: Save the current interaction as a Chat for the user to access and potentially continue later. + chatId: + type: string + description: The id of the Chat that context should be retrieved from and messages added to. An empty id starts a new Chat, and the Chat is saved if saveChat is true. + agentConfig: + $ref: "#/components/schemas/AgentConfig" + description: Describes the agent that will execute the request. + inclusions: + $ref: "#/components/schemas/ChatRestrictionFilters" + description: A list of filters which only allows chat to access certain content. + exclusions: + $ref: "#/components/schemas/ChatRestrictionFilters" + description: A list of filters which disallows chat from accessing certain content. If content is in both inclusions and exclusions, it'll be excluded. + timeoutMillis: + type: integer + description: Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer. + example: 30000 + applicationId: + type: string + description: The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used. + agentId: + type: string + description: The ID of the Agent that should process this chat request. Only Agents with trigger set to 'User chat message' are invokable through this API. If not specified, the default chat experience will be used. + stream: + type: boolean + description: If set, response lines will be streamed one-by-one as they become available. Each will be a ChatResponse, formatted as JSON, and separated by a new line. If false, the entire response will be returned at once. Note that if this is set and the model being used does not support streaming, the model's response will not be streamed, but other messages from the endpoint still will be. ChatResponse: description: A single response from the /chat backend. 
properties: diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index 335f8e55..39b20e1f 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -1,12 +1,12 @@ -speakeasyVersion: 1.636.3 +speakeasyVersion: 1.638.0 sources: Glean API: sourceNamespace: glean-api-specs - sourceRevisionDigest: sha256:1dc42ac3d5012271bce5ddf879010efb2f292ded5ce08d224bb1cd28fcc4558d - sourceBlobDigest: sha256:fa9263107a00a7b99c1ea0d1c0100757e4abad76b9156a0c98186da8953f8f16 + sourceRevisionDigest: sha256:d7d301aaa9efeddb264ed6ae3d4186b8472d376b021c3f22f96d0c15e79c0ee6 + sourceBlobDigest: sha256:a97d8110c742a097fb206e255a509eb2407aec24dc4e9fb8dcacc9a6a52eb134 tags: - latest - - speakeasy-sdk-regen-1760583312 + - speakeasy-sdk-regen-1760999713 Glean Client API: sourceNamespace: glean-client-api sourceRevisionDigest: sha256:4edc63ad559e4f2c9fb9ebf5edaaaaa9269f1874d271cfd84b441d6dacac43d2 @@ -17,10 +17,10 @@ targets: glean: source: Glean API sourceNamespace: glean-api-specs - sourceRevisionDigest: sha256:1dc42ac3d5012271bce5ddf879010efb2f292ded5ce08d224bb1cd28fcc4558d - sourceBlobDigest: sha256:fa9263107a00a7b99c1ea0d1c0100757e4abad76b9156a0c98186da8953f8f16 + sourceRevisionDigest: sha256:d7d301aaa9efeddb264ed6ae3d4186b8472d376b021c3f22f96d0c15e79c0ee6 + sourceBlobDigest: sha256:a97d8110c742a097fb206e255a509eb2407aec24dc4e9fb8dcacc9a6a52eb134 codeSamplesNamespace: glean-api-specs-python-code-samples - codeSamplesRevisionDigest: sha256:c0d827ef14785e6dd39b1db016e2957246cd2c73a71693fd1ef691f7878c7917 + codeSamplesRevisionDigest: sha256:abdef7ef88aa0c1f53e27fbeb7135b40310218f3614256f70532ef9d39af53e5 workflow: workflowVersion: 1.0.0 speakeasyVersion: latest diff --git a/RELEASES.md b/RELEASES.md index ca377ab6..3f3bdfb1 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -308,4 +308,14 @@ Based on: ### Generated - [python v0.10.3] . ### Releases -- [PyPI v0.10.3] https://pypi.org/project/glean/0.10.3 - . \ No newline at end of file +- [PyPI v0.10.3] https://pypi.org/project/glean/0.10.3 - . + +## 2025-10-22 02:08:18 +### Changes +Based on: +- OpenAPI Doc +- Speakeasy CLI 1.638.0 (2.728.0) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v0.11.0] . +### Releases +- [PyPI v0.11.0] https://pypi.org/project/glean/0.11.0 - . \ No newline at end of file diff --git a/docs/models/agentconfig.md b/docs/models/agentconfig.md index dccdc61b..a1664d98 100644 --- a/docs/models/agentconfig.md +++ b/docs/models/agentconfig.md @@ -9,4 +9,5 @@ Describes the agent that executes the request. | ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | | `agent` | [Optional[models.AgentEnum]](../models/agentenum.md) | :heavy_minus_sign: | Name of the agent. | | `tool_sets` | [Optional[models.ToolSets]](../models/toolsets.md) | :heavy_minus_sign: | The types of tools that the agent is allowed to use. Only works with FAST and ADVANCED `agent` values | -| `mode` | [Optional[models.Mode]](../models/mode.md) | :heavy_minus_sign: | Top level modes to run GleanChat in. | \ No newline at end of file +| `mode` | [Optional[models.Mode]](../models/mode.md) | :heavy_minus_sign: | Top level modes to run GleanChat in. 
| +| `use_image_generation` | *Optional[bool]* | :heavy_minus_sign: | Whether the agent should create an image. | \ No newline at end of file diff --git a/docs/models/chatrequest.md b/docs/models/chatrequest.md index 9947a425..ae4528f9 100644 --- a/docs/models/chatrequest.md +++ b/docs/models/chatrequest.md @@ -1,18 +1,20 @@ # ChatRequest +The minimal set of fields that form a chat request. + ## Fields | Field | Type | Required | Description | Example | | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `messages` | List[[models.ChatMessage](../models/chatmessage.md)] | :heavy_check_mark: | A list of chat messages, from most recent to least recent. At least one message must specify a USER author. | | +| `session_info` | [Optional[models.SessionInfo]](../models/sessioninfo.md) | :heavy_minus_sign: | N/A | | | `save_chat` | *Optional[bool]* | :heavy_minus_sign: | Save the current interaction as a Chat for the user to access and potentially continue later. | | | `chat_id` | *Optional[str]* | :heavy_minus_sign: | The id of the Chat that context should be retrieved from and messages added to. An empty id starts a new Chat, and the Chat is saved if saveChat is true. | | -| `messages` | List[[models.ChatMessage](../models/chatmessage.md)] | :heavy_check_mark: | A list of chat messages, from most recent to least recent. At least one message must specify a USER author. | | | `agent_config` | [Optional[models.AgentConfig]](../models/agentconfig.md) | :heavy_minus_sign: | Describes the agent that executes the request. 
| | | `inclusions` | [Optional[models.ChatRestrictionFilters]](../models/chatrestrictionfilters.md) | :heavy_minus_sign: | N/A | | | `exclusions` | [Optional[models.ChatRestrictionFilters]](../models/chatrestrictionfilters.md) | :heavy_minus_sign: | N/A | | | `timeout_millis` | *Optional[int]* | :heavy_minus_sign: | Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer. | 30000 | -| `session_info` | [Optional[models.SessionInfo]](../models/sessioninfo.md) | :heavy_minus_sign: | N/A | | | `application_id` | *Optional[str]* | :heavy_minus_sign: | The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used. | | | `agent_id` | *Optional[str]* | :heavy_minus_sign: | The ID of the Agent that should process this chat request. Only Agents with trigger set to 'User chat message' are invokable through this API. If not specified, the default chat experience will be used. | | | `stream` | *Optional[bool]* | :heavy_minus_sign: | If set, response lines will be streamed one-by-one as they become available. Each will be a ChatResponse, formatted as JSON, and separated by a new line. If false, the entire response will be returned at once. Note that if this is set and the model being used does not support streaming, the model's response will not be streamed, but other messages from the endpoint still will be. | | \ No newline at end of file diff --git a/docs/sdks/clientchat/README.md b/docs/sdks/clientchat/README.md index cf994862..dd0baea5 100644 --- a/docs/sdks/clientchat/README.md +++ b/docs/sdks/clientchat/README.md @@ -53,13 +53,13 @@ with Glean( | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `messages` | List[[models.ChatMessage](../../models/chatmessage.md)] | :heavy_check_mark: | A list of chat messages, from most recent to least recent. At least one message must specify a USER author. | | | `timezone_offset` | *Optional[int]* | :heavy_minus_sign: | The offset of the client's timezone in minutes from UTC. e.g. PDT is -420 because it's 7 hours behind UTC. | | +| `session_info` | [Optional[models.SessionInfo]](../../models/sessioninfo.md) | :heavy_minus_sign: | N/A | | | `save_chat` | *Optional[bool]* | :heavy_minus_sign: | Save the current interaction as a Chat for the user to access and potentially continue later. | | | `chat_id` | *Optional[str]* | :heavy_minus_sign: | The id of the Chat that context should be retrieved from and messages added to. An empty id starts a new Chat, and the Chat is saved if saveChat is true. | | | `agent_config` | [Optional[models.AgentConfig]](../../models/agentconfig.md) | :heavy_minus_sign: | Describes the agent that executes the request. | | | `inclusions` | [Optional[models.ChatRestrictionFilters]](../../models/chatrestrictionfilters.md) | :heavy_minus_sign: | N/A | | | `exclusions` | [Optional[models.ChatRestrictionFilters]](../../models/chatrestrictionfilters.md) | :heavy_minus_sign: | N/A | | | `timeout_millis` | *Optional[int]* | :heavy_minus_sign: | Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer. | 30000 | -| `session_info` | [Optional[models.SessionInfo]](../../models/sessioninfo.md) | :heavy_minus_sign: | N/A | | | `application_id` | *Optional[str]* | :heavy_minus_sign: | The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used. | | | `agent_id` | *Optional[str]* | :heavy_minus_sign: | The ID of the Agent that should process this chat request. Only Agents with trigger set to 'User chat message' are invokable through this API. If not specified, the default chat experience will be used. | | | `stream` | *Optional[bool]* | :heavy_minus_sign: | If set, response lines will be streamed one-by-one as they become available. Each will be a ChatResponse, formatted as JSON, and separated by a new line. If false, the entire response will be returned at once. Note that if this is set and the model being used does not support streaming, the model's response will not be streamed, but other messages from the endpoint still will be. 
| | @@ -430,13 +430,13 @@ with Glean( | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `messages` | List[[models.ChatMessage](../../models/chatmessage.md)] | :heavy_check_mark: | A list of chat messages, from most recent to least recent. At least one message must specify a USER author. | | | `timezone_offset` | *Optional[int]* | :heavy_minus_sign: | The offset of the client's timezone in minutes from UTC. e.g. PDT is -420 because it's 7 hours behind UTC. | | +| `session_info` | [Optional[models.SessionInfo]](../../models/sessioninfo.md) | :heavy_minus_sign: | N/A | | | `save_chat` | *Optional[bool]* | :heavy_minus_sign: | Save the current interaction as a Chat for the user to access and potentially continue later. | | | `chat_id` | *Optional[str]* | :heavy_minus_sign: | The id of the Chat that context should be retrieved from and messages added to. An empty id starts a new Chat, and the Chat is saved if saveChat is true. | | | `agent_config` | [Optional[models.AgentConfig]](../../models/agentconfig.md) | :heavy_minus_sign: | Describes the agent that executes the request. | | | `inclusions` | [Optional[models.ChatRestrictionFilters]](../../models/chatrestrictionfilters.md) | :heavy_minus_sign: | N/A | | | `exclusions` | [Optional[models.ChatRestrictionFilters]](../../models/chatrestrictionfilters.md) | :heavy_minus_sign: | N/A | | | `timeout_millis` | *Optional[int]* | :heavy_minus_sign: | Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer. 
| 30000 | -| `session_info` | [Optional[models.SessionInfo]](../../models/sessioninfo.md) | :heavy_minus_sign: | N/A | | | `application_id` | *Optional[str]* | :heavy_minus_sign: | The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used. | | | `agent_id` | *Optional[str]* | :heavy_minus_sign: | The ID of the Agent that should process this chat request. Only Agents with trigger set to 'User chat message' are invokable through this API. If not specified, the default chat experience will be used. | | | `stream` | *Optional[bool]* | :heavy_minus_sign: | If set, response lines will be streamed one-by-one as they become available. Each will be a ChatResponse, formatted as JSON, and separated by a new line. If false, the entire response will be returned at once. Note that if this is set and the model being used does not support streaming, the model's response will not be streamed, but other messages from the endpoint still will be. | | diff --git a/pyproject.toml b/pyproject.toml index 3a401d37..3cce13d4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "glean-api-client" -version = "0.10.3" +version = "0.11.0" description = "Python Client SDK Generated by Speakeasy." authors = [{ name = "Glean Technologies, Inc." }] license = "MIT" diff --git a/speakeasyusagegen/.speakeasy/logs/naming.log b/speakeasyusagegen/.speakeasy/logs/naming.log index eef6e0bb..82487f07 100644 --- a/speakeasyusagegen/.speakeasy/logs/naming.log +++ b/speakeasyusagegen/.speakeasy/logs/naming.log @@ -586,9 +586,9 @@ CreateauthtokenResponse (HttpMeta: HTTPMetadata, CreateAuthTokenResponse: Create CreateAuthTokenResponse (token: string, expirationTime: integer) Chat (SDK empty) ChatRequest (timezoneOffset: integer, ChatRequest: ChatRequest) - ChatRequest (saveChat: boolean, chatId: string, messages: array ...) + ChatRequest (messages: array, sessionInfo: SessionInfo, saveChat: boolean ...) ChatMessage (agentConfig: AgentConfig, author: enum, citations: array ...) - AgentConfig (agent: enum, toolSets: ToolSets, mode: enum) + AgentConfig (agent: enum, toolSets: ToolSets, mode: enum ...) AgentEnum (enum: DEFAULT, GPT, UNIVERSAL ...) 
ToolSets (enableWebSearch: boolean, enableCompanyTools: boolean) Mode (enum: DEFAULT, QUICK) diff --git a/src/glean/api_client/_version.py b/src/glean/api_client/_version.py index ddd34f57..91493c6b 100644 --- a/src/glean/api_client/_version.py +++ b/src/glean/api_client/_version.py @@ -3,10 +3,10 @@ import importlib.metadata __title__: str = "glean" -__version__: str = "0.10.3" +__version__: str = "0.11.0" __openapi_doc_version__: str = "0.9.0" -__gen_version__: str = "2.723.11" -__user_agent__: str = "speakeasy-sdk/python 0.10.3 2.723.11 0.9.0 glean" +__gen_version__: str = "2.728.0" +__user_agent__: str = "speakeasy-sdk/python 0.11.0 2.728.0 0.9.0 glean" try: if __package__ is not None: diff --git a/src/glean/api_client/client_chat.py b/src/glean/api_client/client_chat.py index b6213616..87e2d4b8 100644 --- a/src/glean/api_client/client_chat.py +++ b/src/glean/api_client/client_chat.py @@ -15,6 +15,9 @@ def create( *, messages: Union[List[models.ChatMessage], List[models.ChatMessageTypedDict]], timezone_offset: Optional[int] = None, + session_info: Optional[ + Union[models.SessionInfo, models.SessionInfoTypedDict] + ] = None, save_chat: Optional[bool] = None, chat_id: Optional[str] = None, agent_config: Optional[ @@ -27,9 +30,6 @@ def create( Union[models.ChatRestrictionFilters, models.ChatRestrictionFiltersTypedDict] ] = None, timeout_millis: Optional[int] = None, - session_info: Optional[ - Union[models.SessionInfo, models.SessionInfoTypedDict] - ] = None, application_id: Optional[str] = None, agent_id: Optional[str] = None, stream: Optional[bool] = None, @@ -44,13 +44,13 @@ def create( :param messages: A list of chat messages, from most recent to least recent. At least one message must specify a USER author. :param timezone_offset: The offset of the client's timezone in minutes from UTC. e.g. PDT is -420 because it's 7 hours behind UTC. + :param session_info: :param save_chat: Save the current interaction as a Chat for the user to access and potentially continue later. :param chat_id: The id of the Chat that context should be retrieved from and messages added to. An empty id starts a new Chat, and the Chat is saved if saveChat is true. :param agent_config: Describes the agent that executes the request. :param inclusions: :param exclusions: :param timeout_millis: Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer. - :param session_info: :param application_id: The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used. :param agent_id: The ID of the Agent that should process this chat request. Only Agents with trigger set to 'User chat message' are invokable through this API. If not specified, the default chat experience will be used. :param stream: If set, response lines will be streamed one-by-one as they become available. Each will be a ChatResponse, formatted as JSON, and separated by a new line. If false, the entire response will be returned at once. Note that if this is set and the model being used does not support streaming, the model's response will not be streamed, but other messages from the endpoint still will be. 
@@ -72,9 +72,12 @@ def create( request = models.ChatRequestRequest( timezone_offset=timezone_offset, chat_request=models.ChatRequest( + messages=utils.get_pydantic_model(messages, List[models.ChatMessage]), + session_info=utils.get_pydantic_model( + session_info, Optional[models.SessionInfo] + ), save_chat=save_chat, chat_id=chat_id, - messages=utils.get_pydantic_model(messages, List[models.ChatMessage]), agent_config=utils.get_pydantic_model( agent_config, Optional[models.AgentConfig] ), @@ -85,9 +88,6 @@ def create( exclusions, Optional[models.ChatRestrictionFilters] ), timeout_millis=timeout_millis, - session_info=utils.get_pydantic_model( - session_info, Optional[models.SessionInfo] - ), application_id=application_id, agent_id=agent_id, stream=stream, @@ -152,6 +152,9 @@ async def create_async( *, messages: Union[List[models.ChatMessage], List[models.ChatMessageTypedDict]], timezone_offset: Optional[int] = None, + session_info: Optional[ + Union[models.SessionInfo, models.SessionInfoTypedDict] + ] = None, save_chat: Optional[bool] = None, chat_id: Optional[str] = None, agent_config: Optional[ @@ -164,9 +167,6 @@ async def create_async( Union[models.ChatRestrictionFilters, models.ChatRestrictionFiltersTypedDict] ] = None, timeout_millis: Optional[int] = None, - session_info: Optional[ - Union[models.SessionInfo, models.SessionInfoTypedDict] - ] = None, application_id: Optional[str] = None, agent_id: Optional[str] = None, stream: Optional[bool] = None, @@ -181,13 +181,13 @@ async def create_async( :param messages: A list of chat messages, from most recent to least recent. At least one message must specify a USER author. :param timezone_offset: The offset of the client's timezone in minutes from UTC. e.g. PDT is -420 because it's 7 hours behind UTC. + :param session_info: :param save_chat: Save the current interaction as a Chat for the user to access and potentially continue later. :param chat_id: The id of the Chat that context should be retrieved from and messages added to. An empty id starts a new Chat, and the Chat is saved if saveChat is true. :param agent_config: Describes the agent that executes the request. :param inclusions: :param exclusions: :param timeout_millis: Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer. - :param session_info: :param application_id: The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used. :param agent_id: The ID of the Agent that should process this chat request. Only Agents with trigger set to 'User chat message' are invokable through this API. If not specified, the default chat experience will be used. :param stream: If set, response lines will be streamed one-by-one as they become available. Each will be a ChatResponse, formatted as JSON, and separated by a new line. If false, the entire response will be returned at once. Note that if this is set and the model being used does not support streaming, the model's response will not be streamed, but other messages from the endpoint still will be. 
@@ -209,9 +209,12 @@ async def create_async( request = models.ChatRequestRequest( timezone_offset=timezone_offset, chat_request=models.ChatRequest( + messages=utils.get_pydantic_model(messages, List[models.ChatMessage]), + session_info=utils.get_pydantic_model( + session_info, Optional[models.SessionInfo] + ), save_chat=save_chat, chat_id=chat_id, - messages=utils.get_pydantic_model(messages, List[models.ChatMessage]), agent_config=utils.get_pydantic_model( agent_config, Optional[models.AgentConfig] ), @@ -222,9 +225,6 @@ async def create_async( exclusions, Optional[models.ChatRestrictionFilters] ), timeout_millis=timeout_millis, - session_info=utils.get_pydantic_model( - session_info, Optional[models.SessionInfo] - ), application_id=application_id, agent_id=agent_id, stream=stream, @@ -1759,6 +1759,9 @@ def create_stream( *, messages: Union[List[models.ChatMessage], List[models.ChatMessageTypedDict]], timezone_offset: Optional[int] = None, + session_info: Optional[ + Union[models.SessionInfo, models.SessionInfoTypedDict] + ] = None, save_chat: Optional[bool] = None, chat_id: Optional[str] = None, agent_config: Optional[ @@ -1771,9 +1774,6 @@ def create_stream( Union[models.ChatRestrictionFilters, models.ChatRestrictionFiltersTypedDict] ] = None, timeout_millis: Optional[int] = None, - session_info: Optional[ - Union[models.SessionInfo, models.SessionInfoTypedDict] - ] = None, application_id: Optional[str] = None, agent_id: Optional[str] = None, stream: Optional[bool] = None, @@ -1788,13 +1788,13 @@ def create_stream( :param messages: A list of chat messages, from most recent to least recent. At least one message must specify a USER author. :param timezone_offset: The offset of the client's timezone in minutes from UTC. e.g. PDT is -420 because it's 7 hours behind UTC. + :param session_info: :param save_chat: Save the current interaction as a Chat for the user to access and potentially continue later. :param chat_id: The id of the Chat that context should be retrieved from and messages added to. An empty id starts a new Chat, and the Chat is saved if saveChat is true. :param agent_config: Describes the agent that executes the request. :param inclusions: :param exclusions: :param timeout_millis: Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer. - :param session_info: :param application_id: The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used. :param agent_id: The ID of the Agent that should process this chat request. Only Agents with trigger set to 'User chat message' are invokable through this API. If not specified, the default chat experience will be used. :param stream: If set, response lines will be streamed one-by-one as they become available. Each will be a ChatResponse, formatted as JSON, and separated by a new line. If false, the entire response will be returned at once. Note that if this is set and the model being used does not support streaming, the model's response will not be streamed, but other messages from the endpoint still will be. 
@@ -1816,9 +1816,12 @@ def create_stream( request = models.ChatStreamRequest( timezone_offset=timezone_offset, chat_request=models.ChatRequest( + messages=utils.get_pydantic_model(messages, List[models.ChatMessage]), + session_info=utils.get_pydantic_model( + session_info, Optional[models.SessionInfo] + ), save_chat=save_chat, chat_id=chat_id, - messages=utils.get_pydantic_model(messages, List[models.ChatMessage]), agent_config=utils.get_pydantic_model( agent_config, Optional[models.AgentConfig] ), @@ -1829,9 +1832,6 @@ def create_stream( exclusions, Optional[models.ChatRestrictionFilters] ), timeout_millis=timeout_millis, - session_info=utils.get_pydantic_model( - session_info, Optional[models.SessionInfo] - ), application_id=application_id, agent_id=agent_id, stream=stream, @@ -1896,6 +1896,9 @@ async def create_stream_async( *, messages: Union[List[models.ChatMessage], List[models.ChatMessageTypedDict]], timezone_offset: Optional[int] = None, + session_info: Optional[ + Union[models.SessionInfo, models.SessionInfoTypedDict] + ] = None, save_chat: Optional[bool] = None, chat_id: Optional[str] = None, agent_config: Optional[ @@ -1908,9 +1911,6 @@ async def create_stream_async( Union[models.ChatRestrictionFilters, models.ChatRestrictionFiltersTypedDict] ] = None, timeout_millis: Optional[int] = None, - session_info: Optional[ - Union[models.SessionInfo, models.SessionInfoTypedDict] - ] = None, application_id: Optional[str] = None, agent_id: Optional[str] = None, stream: Optional[bool] = None, @@ -1925,13 +1925,13 @@ async def create_stream_async( :param messages: A list of chat messages, from most recent to least recent. At least one message must specify a USER author. :param timezone_offset: The offset of the client's timezone in minutes from UTC. e.g. PDT is -420 because it's 7 hours behind UTC. + :param session_info: :param save_chat: Save the current interaction as a Chat for the user to access and potentially continue later. :param chat_id: The id of the Chat that context should be retrieved from and messages added to. An empty id starts a new Chat, and the Chat is saved if saveChat is true. :param agent_config: Describes the agent that executes the request. :param inclusions: :param exclusions: :param timeout_millis: Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer. - :param session_info: :param application_id: The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used. :param agent_id: The ID of the Agent that should process this chat request. Only Agents with trigger set to 'User chat message' are invokable through this API. If not specified, the default chat experience will be used. :param stream: If set, response lines will be streamed one-by-one as they become available. Each will be a ChatResponse, formatted as JSON, and separated by a new line. If false, the entire response will be returned at once. Note that if this is set and the model being used does not support streaming, the model's response will not be streamed, but other messages from the endpoint still will be. 
@@ -1953,9 +1953,12 @@ async def create_stream_async( request = models.ChatStreamRequest( timezone_offset=timezone_offset, chat_request=models.ChatRequest( + messages=utils.get_pydantic_model(messages, List[models.ChatMessage]), + session_info=utils.get_pydantic_model( + session_info, Optional[models.SessionInfo] + ), save_chat=save_chat, chat_id=chat_id, - messages=utils.get_pydantic_model(messages, List[models.ChatMessage]), agent_config=utils.get_pydantic_model( agent_config, Optional[models.AgentConfig] ), @@ -1966,9 +1969,6 @@ async def create_stream_async( exclusions, Optional[models.ChatRestrictionFilters] ), timeout_millis=timeout_millis, - session_info=utils.get_pydantic_model( - session_info, Optional[models.SessionInfo] - ), application_id=application_id, agent_id=agent_id, stream=stream, diff --git a/src/glean/api_client/models/agentconfig.py b/src/glean/api_client/models/agentconfig.py index f1efda67..15cf8820 100644 --- a/src/glean/api_client/models/agentconfig.py +++ b/src/glean/api_client/models/agentconfig.py @@ -35,6 +35,8 @@ class AgentConfigTypedDict(TypedDict): r"""The types of tools that the agent is allowed to use. Only works with FAST and ADVANCED `agent` values""" mode: NotRequired[Mode] r"""Top level modes to run GleanChat in.""" + use_image_generation: NotRequired[bool] + r"""Whether the agent should create an image.""" class AgentConfig(BaseModel): @@ -48,3 +50,8 @@ class AgentConfig(BaseModel): mode: Optional[Mode] = None r"""Top level modes to run GleanChat in.""" + + use_image_generation: Annotated[ + Optional[bool], pydantic.Field(alias="useImageGeneration") + ] = None + r"""Whether the agent should create an image.""" diff --git a/src/glean/api_client/models/chatrequest.py b/src/glean/api_client/models/chatrequest.py index ca45dc63..b04c63c2 100644 --- a/src/glean/api_client/models/chatrequest.py +++ b/src/glean/api_client/models/chatrequest.py @@ -15,8 +15,11 @@ class ChatRequestTypedDict(TypedDict): + r"""The minimal set of fields that form a chat request.""" + messages: List[ChatMessageTypedDict] r"""A list of chat messages, from most recent to least recent. At least one message must specify a USER author.""" + session_info: NotRequired[SessionInfoTypedDict] save_chat: NotRequired[bool] r"""Save the current interaction as a Chat for the user to access and potentially continue later.""" chat_id: NotRequired[str] @@ -27,7 +30,6 @@ class ChatRequestTypedDict(TypedDict): exclusions: NotRequired[ChatRestrictionFiltersTypedDict] timeout_millis: NotRequired[int] r"""Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer.""" - session_info: NotRequired[SessionInfoTypedDict] application_id: NotRequired[str] r"""The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used.""" agent_id: NotRequired[str] @@ -37,9 +39,15 @@ class ChatRequestTypedDict(TypedDict): class ChatRequest(BaseModel): + r"""The minimal set of fields that form a chat request.""" + messages: List[ChatMessage] r"""A list of chat messages, from most recent to least recent. 
At least one message must specify a USER author.""" + session_info: Annotated[ + Optional[SessionInfo], pydantic.Field(alias="sessionInfo") + ] = None + save_chat: Annotated[Optional[bool], pydantic.Field(alias="saveChat")] = None r"""Save the current interaction as a Chat for the user to access and potentially continue later.""" @@ -60,10 +68,6 @@ class ChatRequest(BaseModel): ) r"""Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer.""" - session_info: Annotated[ - Optional[SessionInfo], pydantic.Field(alias="sessionInfo") - ] = None - application_id: Annotated[Optional[str], pydantic.Field(alias="applicationId")] = ( None ) diff --git a/tests/mockserver/internal/handler/pathpostrestapiv1createcollection.go b/tests/mockserver/internal/handler/pathpostrestapiv1createcollection.go index 5c99a0ad..371b9023 100644 --- a/tests/mockserver/internal/handler/pathpostrestapiv1createcollection.go +++ b/tests/mockserver/internal/handler/pathpostrestapiv1createcollection.go @@ -56,14 +56,14 @@ func testCreatecollectionCreatecollection0(w http.ResponseWriter, req *http.Requ operations.ResponseBody2{ Collection: &components.Collection{ Name: "", - Description: "motionless whenever paintwork import over cuckoo", + Description: "whole busily jive hawk gee basic minus hence", AddedRoles: []components.UserRoleSpecification{ components.UserRoleSpecification{ Person: &components.Person{ Name: "George Clooney", ObfuscatedID: "abc123", }, - Role: components.UserRoleOwner, + Role: components.UserRoleAnswerModerator, }, }, RemovedRoles: []components.UserRoleSpecification{ @@ -90,7 +90,7 @@ func testCreatecollectionCreatecollection0(w http.ResponseWriter, req *http.Requ }, }, }, - ID: 363711, + ID: 158969, Creator: &components.Person{ Name: "George Clooney", ObfuscatedID: "abc123", @@ -101,7 +101,7 @@ func testCreatecollectionCreatecollection0(w http.ResponseWriter, req *http.Requ }, Items: []components.CollectionItem{ components.CollectionItem{ - CollectionID: 570197, + CollectionID: 110375, CreatedBy: &components.Person{ Name: "George Clooney", ObfuscatedID: "abc123", @@ -266,13 +266,13 @@ func testCreatecollectionCreatecollection0(w http.ResponseWriter, req *http.Requ Name: "George Clooney", ObfuscatedID: "abc123", }, - Role: components.UserRoleAnswerModerator, + Role: components.UserRoleEditor, }, }, }, Collection: &components.Collection{ Name: "", - Description: "athwart skateboard newsstand farm bourgeoisie ah how elliptical aha well-to-do", + Description: "wedge colorfully orientate rally", AudienceFilters: []components.FacetFilter{ components.FacetFilter{ FieldName: types.String("type"), @@ -288,7 +288,7 @@ func testCreatecollectionCreatecollection0(w http.ResponseWriter, req *http.Requ }, }, }, - ID: 643990, + ID: 131797, Creator: &components.Person{ Name: "George Clooney", ObfuscatedID: "abc123", @@ -300,7 +300,7 @@ func testCreatecollectionCreatecollection0(w http.ResponseWriter, req *http.Requ Children: []components.Collection{ components.Collection{ Name: "", - Description: "woot purse salty even as advanced", + Description: "outside yippee sidetrack mature regularly mouser inject worth", AudienceFilters: []components.FacetFilter{ components.FacetFilter{ FieldName: types.String("type"), @@ -316,7 +316,7 @@ func testCreatecollectionCreatecollection0(w http.ResponseWriter, req *http.Requ }, }, }, - ID: 359978, + ID: 149448, Creator: &components.Person{ Name: "George Clooney", ObfuscatedID: "abc123", @@ -328,7 +328,7 @@ func 
testCreatecollectionCreatecollection0(w http.ResponseWriter, req *http.Requ }, }, }, - ItemType: components.CollectionItemItemTypeURL, + ItemType: components.CollectionItemItemTypeCollection, }, }, }, diff --git a/tests/mockserver/internal/sdk/models/components/agentconfig.go b/tests/mockserver/internal/sdk/models/components/agentconfig.go index dcc70f4e..be2e03e8 100644 --- a/tests/mockserver/internal/sdk/models/components/agentconfig.go +++ b/tests/mockserver/internal/sdk/models/components/agentconfig.go @@ -78,6 +78,8 @@ type AgentConfig struct { ToolSets *ToolSets `json:"toolSets,omitempty"` // Top level modes to run GleanChat in. Mode *Mode `json:"mode,omitempty"` + // Whether the agent should create an image. + UseImageGeneration *bool `json:"useImageGeneration,omitempty"` } func (o *AgentConfig) GetAgent() *AgentEnum { @@ -100,3 +102,10 @@ func (o *AgentConfig) GetMode() *Mode { } return o.Mode } + +func (o *AgentConfig) GetUseImageGeneration() *bool { + if o == nil { + return nil + } + return o.UseImageGeneration +} diff --git a/tests/mockserver/internal/sdk/models/components/chatrequest.go b/tests/mockserver/internal/sdk/models/components/chatrequest.go index d26c7a1c..af53448a 100644 --- a/tests/mockserver/internal/sdk/models/components/chatrequest.go +++ b/tests/mockserver/internal/sdk/models/components/chatrequest.go @@ -2,20 +2,21 @@ package components +// ChatRequest - The minimal set of fields that form a chat request. type ChatRequest struct { + // A list of chat messages, from most recent to least recent. At least one message must specify a USER author. + Messages []ChatMessage `json:"messages"` + SessionInfo *SessionInfo `json:"sessionInfo,omitempty"` // Save the current interaction as a Chat for the user to access and potentially continue later. SaveChat *bool `json:"saveChat,omitempty"` // The id of the Chat that context should be retrieved from and messages added to. An empty id starts a new Chat, and the Chat is saved if saveChat is true. ChatID *string `json:"chatId,omitempty"` - // A list of chat messages, from most recent to least recent. At least one message must specify a USER author. - Messages []ChatMessage `json:"messages"` // Describes the agent that executes the request. AgentConfig *AgentConfig `json:"agentConfig,omitempty"` Inclusions *ChatRestrictionFilters `json:"inclusions,omitempty"` Exclusions *ChatRestrictionFilters `json:"exclusions,omitempty"` // Timeout in milliseconds for the request. A `408` error will be returned if handling the request takes longer. - TimeoutMillis *int64 `json:"timeoutMillis,omitempty"` - SessionInfo *SessionInfo `json:"sessionInfo,omitempty"` + TimeoutMillis *int64 `json:"timeoutMillis,omitempty"` // The ID of the application this request originates from, used to determine the configuration of underlying chat processes. This should correspond to the ID set during admin setup. If not specified, the default chat experience will be used. ApplicationID *string `json:"applicationId,omitempty"` // The ID of the Agent that should process this chat request. Only Agents with trigger set to 'User chat message' are invokable through this API. If not specified, the default chat experience will be used. 
@@ -24,25 +25,32 @@ type ChatRequest struct { Stream *bool `json:"stream,omitempty"` } -func (o *ChatRequest) GetSaveChat() *bool { +func (o *ChatRequest) GetMessages() []ChatMessage { + if o == nil { + return []ChatMessage{} + } + return o.Messages +} + +func (o *ChatRequest) GetSessionInfo() *SessionInfo { if o == nil { return nil } - return o.SaveChat + return o.SessionInfo } -func (o *ChatRequest) GetChatID() *string { +func (o *ChatRequest) GetSaveChat() *bool { if o == nil { return nil } - return o.ChatID + return o.SaveChat } -func (o *ChatRequest) GetMessages() []ChatMessage { +func (o *ChatRequest) GetChatID() *string { if o == nil { - return []ChatMessage{} + return nil } - return o.Messages + return o.ChatID } func (o *ChatRequest) GetAgentConfig() *AgentConfig { @@ -73,13 +81,6 @@ func (o *ChatRequest) GetTimeoutMillis() *int64 { return o.TimeoutMillis } -func (o *ChatRequest) GetSessionInfo() *SessionInfo { - if o == nil { - return nil - } - return o.SessionInfo -} - func (o *ChatRequest) GetApplicationID() *string { if o == nil { return nil From 43b7f2c6e4b785809ad84e0f40eef1818e9b5790 Mon Sep 17 00:00:00 2001 From: "speakeasy-github[bot]" <128539517+speakeasy-github[bot]@users.noreply.github.com> Date: Wed, 22 Oct 2025 02:11:46 +0000 Subject: [PATCH 2/2] empty commit to trigger [run-tests] workflow
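
For reviewers, a minimal usage sketch of the new `use_image_generation` flag surfaced by this regeneration. Only `models.AgentConfig(use_image_generation=...)` and the `glean.client.chat.create()` entry point are taken from this patch; the client constructor arguments, environment variable names, and the message payload shape are illustrative assumptions and may need adjusting for a given deployment.

```python
# Sketch only: exercises AgentConfig.use_image_generation added in 0.11.0.
# Constructor arguments, env var names, and the message dict shape are
# assumptions for illustration, not taken verbatim from this patch.
import os

from glean.api_client import Glean, models

with Glean(
    api_token=os.getenv("GLEAN_API_TOKEN", ""),  # assumed env var name
    instance=os.getenv("GLEAN_INSTANCE", ""),    # assumed env var name
) as glean:
    response = glean.client.chat.create(
        messages=[
            {
                # "USER" matches the required USER author enum value.
                "author": "USER",
                "fragments": [{"text": "Create a diagram of our release flow"}],
            }
        ],
        # New field in this release: ask the agent to generate an image.
        agent_config=models.AgentConfig(use_image_generation=True),
    )
    print(response)
```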