diff --git a/sdk/ai/azure-ai-projects/.env.template b/sdk/ai/azure-ai-projects/.env.template index ca5a9a9beb85..5caf8c151143 100644 --- a/sdk/ai/azure-ai-projects/.env.template +++ b/sdk/ai/azure-ai-projects/.env.template @@ -9,10 +9,15 @@ # 3. Run the tests (`pytest`) or run samples in the `samples` folder # +# Set to true for console logging +AZURE_AI_PROJECTS_CONSOLE_LOGGING= + ####################################################################### # # Used in samples # +####################################################################### + # Project endpoint has the format: # `https://.services.ai.azure.com/api/projects/` AZURE_AI_PROJECT_ENDPOINT= @@ -23,12 +28,8 @@ CONNECTION_NAME= AZURE_AI_PROJECTS_AZURE_SUBSCRIPTION_ID= AZURE_AI_PROJECTS_AZURE_RESOURCE_GROUP= AZURE_AI_PROJECTS_AZURE_AOAI_ACCOUNT= - -# Used in Memory Store samples MEMORY_STORE_CHAT_MODEL_DEPLOYMENT_NAME= MEMORY_STORE_EMBEDDING_MODEL_DEPLOYMENT_NAME= - -# Used in Agent tools samples IMAGE_GENERATION_MODEL_DEPLOYMENT_NAME= BING_PROJECT_CONNECTION_ID= MCP_PROJECT_CONNECTION_ID= @@ -50,6 +51,7 @@ A2A_USER_INPUT= ####################################################################### # # Used in tests +# ####################################################################### # Used for recording or playback diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index 9e6b24b66a3f..6d85f732a306 100644 --- a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -21,7 +21,10 @@ * Rename class `MemorySearchTool` to `MemorySearchPreviewTool`. * Rename class `MicrosoftFabricAgentTool` to `MicrosoftFabricPreviewTool`. * Rename class `SharepointAgentTool` to `SharepointPreviewTool`. -* Rename class `ItemParam` to `InputItem`. +* Other class renames: + * Rename class `ItemParam` to `InputItem`. + * Rename class `PromptAgentDefinitionText` to `PromptAgentDefinitionTextOptions` + * Rename class `EvaluationComparisonRequest` to `InsightRequest` * Tracing: workflow actions in conversation item listings are now emitted as "gen_ai.conversation.item" events (with role="workflow") instead of "gen_ai.workflow.action" events in the list_conversation_items span. * Tracing: response generation span names changed from "responses {model_name}" to "chat {model_name}" for model calls and from "responses {agent_name}" to "invoke_agent {agent_name}" for agent calls. * Tracing: response generation operation names changed from "responses" to "chat" for model calls and from "responses" to "invoke_agent" for agent calls. @@ -29,6 +32,10 @@ * Tracing: agent creation uses gen_ai.system.instructions attribute directly under the span instead of an event. * Tracing: "gen_ai.provider.name" attribute value changed to "microsoft.foundry". +### Sample updates + +* Add and update samples for `WebSearchTool` and `WebSearchPreviewTool` + ## 2.0.0b3 (2026-01-06) ### Features Added diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index 86e711138dcb..2acfb8db9b7c 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -20,7 +20,7 @@ resources in your Microsoft Foundry Project. Use it to: * Model Context Protocol (MCP) * OpenAPI * SharePoint - * Web Search + * Web Search/Web Search Preview * **Get an OpenAI client** using `.get_openai_client()` method to run Responses, Conversations, Evals and FineTuning operations with your Agent. * **Manage memory stores** for Agent conversations, using the `.memory_stores` operations. * **Explore additional evaluation tools** to assess the performance of your generative AI application, using the `.evaluation_rules`, @@ -32,7 +32,7 @@ resources in your Microsoft Foundry Project. Use it to: * **Upload documents and create Datasets** to reference them using the `.datasets` operations. * **Create and enumerate Search Indexes** using methods the `.indexes` operations. -The client library uses version `2025-11-15-preview` of the AI Foundry [data plane REST APIs](https://aka.ms/azsdk/azure-ai-projects-v2/api-reference-2025-11-15-preview). +The client library uses version `v1` of the AI Foundry [data plane REST APIs](https://aka.ms/azsdk/azure-ai-projects-v2/api-reference-v1). [Product documentation](https://aka.ms/azsdk/azure-ai-projects-v2/product-doc) | [Samples][samples] @@ -64,12 +64,6 @@ To report an issue with the client library, or request additional features, plea pip install --pre azure-ai-projects ``` -Note that the packages [openai](https://pypi.org/project/openai) and [azure-identity](https://pypi.org/project/azure-identity) also need to be installed if you intend to call `get_openai_client()`: - -```bash -pip install openai azure-identity -``` - ## Key concepts ### Create and authenticate the client with Entra ID @@ -288,20 +282,48 @@ if image_data and image_data[0]: See the full sample in file `\agents\tools\sample_agent_image_generation.py` in the [Samples][samples] folder. -**Web Search** +**Web Search/Web Search Preview** -Perform general web searches to retrieve current information from the internet. [OpenAI Documentation](https://platform.openai.com/docs/guides/tools-web-search) +Discover up-to-date web content with the GA Web Search tool or try the Web Search Preview tool for the latest enhancements. Guidance on when to use each option is in the documentation: https://learn.microsoft.com/azure/ai-foundry/agents/how-to/tools/web-overview?view=foundry#determine-the-best-tool-for-your-use-cases. ```python -tool = WebSearchPreviewTool(user_location=ApproximateLocation(country="GB", city="London", region="London")) +tool = WebSearchTool(user_location=WebSearchApproximateLocation(country="GB", city="London", region="London")) ``` See the full sample in file `\agents\tools\sample_agent_web_search.py` in the [Samples][samples] folder. + + +```python +tool = WebSearchPreviewTool(user_location=ApproximateLocation(country="GB", city="London", region="London")) +``` + + + +See the full sample in file `\agents\tools\sample_agent_web_search_preview.py` in the [Samples][samples] folder. + +Use the GA Web Search tool with a Bing Custom Search connection to scope results to your custom search instance: + + + +```python +tool = WebSearchTool( + custom_search_configuration=WebSearchConfiguration( + project_connection_id=os.environ["BING_CUSTOM_SEARCH_PROJECT_CONNECTION_ID"], + instance_name=os.environ["BING_CUSTOM_SEARCH_INSTANCE_NAME"], + ) +) +``` + + + +See the full sample in file `\agents\tools\sample_agent_web_search_with_custom_search.py` in the [Samples][samples] folder. + + **Computer Use** Enable agents to interact directly with computer systems for task automation and system operations: @@ -346,7 +368,7 @@ Call external APIs defined by OpenAPI specifications without additional client-s ```python with open(weather_asset_file_path, "r") as f: - openapi_weather = jsonref.loads(f.read()) + openapi_weather = cast(dict[str, Any], jsonref.loads(f.read())) tool = OpenApiTool( openapi=OpenApiFunctionDefinition( @@ -594,7 +616,7 @@ Call external APIs defined by OpenAPI specifications using project connection au ```python with open(tripadvisor_asset_file_path, "r") as f: - openapi_tripadvisor = jsonref.loads(f.read()) + openapi_tripadvisor = cast(dict[str, Any], jsonref.loads(f.read())) tool = OpenApiTool( openapi=OpenApiFunctionDefinition( @@ -954,7 +976,7 @@ You can add an Application Insights Azure resource to your Microsoft Foundry pro Make sure to install OpenTelemetry and the Azure SDK tracing plugin via ```bash -pip install "azure-ai-projects>=2.0.0b1" azure-identity opentelemetry-sdk azure-core-tracing-opentelemetry azure-monitor-opentelemetry +pip install "azure-ai-projects>=2.0.0b4" opentelemetry-sdk azure-core-tracing-opentelemetry azure-monitor-opentelemetry ``` You will also need an exporter to send telemetry to your observability backend. You can print traces to the console or use a local viewer such as [Aspire Dashboard](https://learn.microsoft.com/dotnet/aspire/fundamentals/dashboard/standalone?tabs=bash). diff --git a/sdk/ai/azure-ai-projects/_metadata.json b/sdk/ai/azure-ai-projects/_metadata.json index 509044270b48..539264d9b9bb 100644 --- a/sdk/ai/azure-ai-projects/_metadata.json +++ b/sdk/ai/azure-ai-projects/_metadata.json @@ -1,3 +1,3 @@ { - "apiVersion": "2025-11-15-preview" + "apiVersion": "v1" } \ No newline at end of file diff --git a/sdk/ai/azure-ai-projects/_tsp-location.yaml b/sdk/ai/azure-ai-projects/_tsp-location.yaml deleted file mode 100644 index 00631b52071a..000000000000 --- a/sdk/ai/azure-ai-projects/_tsp-location.yaml +++ /dev/null @@ -1,4 +0,0 @@ -directory: specification/ai/Foundry -commit: 51415f0131fc0e65f88e8a75b4b729a528e8e959 -repo: Azure/azure-rest-api-specs -additionalDirectories: diff --git a/sdk/ai/azure-ai-projects/apiview-properties.json b/sdk/ai/azure-ai-projects/apiview-properties.json index bfc2a677bb06..272bf546c927 100644 --- a/sdk/ai/azure-ai-projects/apiview-properties.json +++ b/sdk/ai/azure-ai-projects/apiview-properties.json @@ -3,17 +3,15 @@ "CrossLanguageDefinitionId": { "azure.ai.projects.models.Tool": "OpenAI.Tool", "azure.ai.projects.models.A2APreviewTool": "Azure.AI.Projects.A2APreviewTool", + "azure.ai.projects.models.InsightRequest": "Azure.AI.Projects.InsightRequest", + "azure.ai.projects.models.AgentClusterInsightRequest": "Azure.AI.Projects.AgentClusterInsightRequest", "azure.ai.projects.models.InsightResult": "Azure.AI.Projects.InsightResult", "azure.ai.projects.models.AgentClusterInsightResult": "Azure.AI.Projects.AgentClusterInsightResult", - "azure.ai.projects.models.InsightRequest": "Azure.AI.Projects.InsightRequest", - "azure.ai.projects.models.AgentClusterInsightsRequest": "Azure.AI.Projects.AgentClusterInsightsRequest", "azure.ai.projects.models.AgentDefinition": "Azure.AI.Projects.AgentDefinition", "azure.ai.projects.models.AgentDetails": "Azure.AI.Projects.AgentObject", "azure.ai.projects.models.BaseCredentials": "Azure.AI.Projects.BaseCredentials", - "azure.ai.projects.models.AgenticIdentityCredentials": "Azure.AI.Projects.AgenticIdentityCredentials", - "azure.ai.projects.models.AgentId": "Azure.AI.Projects.AgentId", + "azure.ai.projects.models.AgenticIdentityPreviewCredentials": "Azure.AI.Projects.AgenticIdentityPreviewCredentials", "azure.ai.projects.models.AgentObjectVersions": "Azure.AI.Projects.AgentObject.versions.anonymous", - "azure.ai.projects.models.AgentReference": "Azure.AI.Projects.AgentReference", "azure.ai.projects.models.EvaluationTaxonomyInput": "Azure.AI.Projects.EvaluationTaxonomyInput", "azure.ai.projects.models.AgentTaxonomyInput": "Azure.AI.Projects.AgentTaxonomyInput", "azure.ai.projects.models.AgentVersionDetails": "Azure.AI.Projects.AgentVersionObject", @@ -21,18 +19,15 @@ "azure.ai.projects.models.Annotation": "OpenAI.Annotation", "azure.ai.projects.models.ApiErrorResponse": "Azure.AI.Projects.ApiErrorResponse", "azure.ai.projects.models.ApiKeyCredentials": "Azure.AI.Projects.ApiKeyCredentials", - "azure.ai.projects.models.ApplyPatchFileOperation": "OpenAI.ApplyPatchFileOperation", - "azure.ai.projects.models.ApplyPatchCreateFileOperation": "OpenAI.ApplyPatchCreateFileOperation", "azure.ai.projects.models.ApplyPatchOperationParam": "OpenAI.ApplyPatchOperationParam", "azure.ai.projects.models.ApplyPatchCreateFileOperationParam": "OpenAI.ApplyPatchCreateFileOperationParam", - "azure.ai.projects.models.ApplyPatchDeleteFileOperation": "OpenAI.ApplyPatchDeleteFileOperation", "azure.ai.projects.models.ApplyPatchDeleteFileOperationParam": "OpenAI.ApplyPatchDeleteFileOperationParam", "azure.ai.projects.models.ApplyPatchToolParam": "OpenAI.ApplyPatchToolParam", - "azure.ai.projects.models.ApplyPatchUpdateFileOperation": "OpenAI.ApplyPatchUpdateFileOperation", "azure.ai.projects.models.ApplyPatchUpdateFileOperationParam": "OpenAI.ApplyPatchUpdateFileOperationParam", "azure.ai.projects.models.ApproximateLocation": "OpenAI.ApproximateLocation", "azure.ai.projects.models.Target": "Azure.AI.Projects.Target", "azure.ai.projects.models.AzureAIAgentTarget": "Azure.AI.Projects.AzureAIAgentTarget", + "azure.ai.projects.models.AzureAIModelTarget": "Azure.AI.Projects.AzureAIModelTarget", "azure.ai.projects.models.Index": "Azure.AI.Projects.Index", "azure.ai.projects.models.AzureAISearchIndex": "Azure.AI.Projects.AzureAISearchIndex", "azure.ai.projects.models.AzureAISearchTool": "Azure.AI.Projects.AzureAISearchTool", @@ -80,7 +75,6 @@ "azure.ai.projects.models.EvaluationRuleAction": "Azure.AI.Projects.EvaluationRuleAction", "azure.ai.projects.models.ContinuousEvaluationRuleAction": "Azure.AI.Projects.ContinuousEvaluationRuleAction", "azure.ai.projects.models.CosmosDBIndex": "Azure.AI.Projects.CosmosDBIndex", - "azure.ai.projects.models.CreatedBy": "Azure.AI.Projects.CreatedBy", "azure.ai.projects.models.Trigger": "Azure.AI.Projects.Trigger", "azure.ai.projects.models.CronTrigger": "Azure.AI.Projects.CronTrigger", "azure.ai.projects.models.CustomCredential": "Azure.AI.Projects.CustomCredential", @@ -104,18 +98,18 @@ "azure.ai.projects.models.EmbeddingConfiguration": "Azure.AI.Projects.EmbeddingConfiguration", "azure.ai.projects.models.EntraIDCredentials": "Azure.AI.Projects.EntraIDCredentials", "azure.ai.projects.models.Error": "OpenAI.Error", - "azure.ai.projects.models.EvalCompareReport": "Azure.AI.Projects.EvalCompareReport", "azure.ai.projects.models.EvalResult": "Azure.AI.Projects.EvalResult", "azure.ai.projects.models.EvalRunResultCompareItem": "Azure.AI.Projects.EvalRunResultCompareItem", "azure.ai.projects.models.EvalRunResultComparison": "Azure.AI.Projects.EvalRunResultComparison", "azure.ai.projects.models.EvalRunResultSummary": "Azure.AI.Projects.EvalRunResultSummary", - "azure.ai.projects.models.EvaluationComparisonRequest": "Azure.AI.Projects.EvaluationComparisonRequest", + "azure.ai.projects.models.EvaluationComparisonInsightRequest": "Azure.AI.Projects.EvaluationComparisonInsightRequest", + "azure.ai.projects.models.EvaluationComparisonInsightResult": "Azure.AI.Projects.EvaluationComparisonInsightResult", "azure.ai.projects.models.InsightSample": "Azure.AI.Projects.InsightSample", "azure.ai.projects.models.EvaluationResultSample": "Azure.AI.Projects.EvaluationResultSample", "azure.ai.projects.models.EvaluationRule": "Azure.AI.Projects.EvaluationRule", "azure.ai.projects.models.EvaluationRuleFilter": "Azure.AI.Projects.EvaluationRuleFilter", + "azure.ai.projects.models.EvaluationRunClusterInsightRequest": "Azure.AI.Projects.EvaluationRunClusterInsightRequest", "azure.ai.projects.models.EvaluationRunClusterInsightResult": "Azure.AI.Projects.EvaluationRunClusterInsightResult", - "azure.ai.projects.models.EvaluationRunClusterInsightsRequest": "Azure.AI.Projects.EvaluationRunClusterInsightsRequest", "azure.ai.projects.models.ScheduleTask": "Azure.AI.Projects.ScheduleTask", "azure.ai.projects.models.EvaluationScheduleTask": "Azure.AI.Projects.EvaluationScheduleTask", "azure.ai.projects.models.EvaluationTaxonomy": "Azure.AI.Projects.EvaluationTaxonomy", @@ -133,15 +127,10 @@ "azure.ai.projects.models.FunctionAndCustomToolCallOutputInputFileContent": "OpenAI.FunctionAndCustomToolCallOutputInputFileContent", "azure.ai.projects.models.FunctionAndCustomToolCallOutputInputImageContent": "OpenAI.FunctionAndCustomToolCallOutputInputImageContent", "azure.ai.projects.models.FunctionAndCustomToolCallOutputInputTextContent": "OpenAI.FunctionAndCustomToolCallOutputInputTextContent", - "azure.ai.projects.models.FunctionShellAction": "OpenAI.FunctionShellAction", "azure.ai.projects.models.FunctionShellActionParam": "OpenAI.FunctionShellActionParam", - "azure.ai.projects.models.FunctionShellCallOutputContent": "OpenAI.FunctionShellCallOutputContent", "azure.ai.projects.models.FunctionShellCallOutputContentParam": "OpenAI.FunctionShellCallOutputContentParam", - "azure.ai.projects.models.FunctionShellCallOutputOutcome": "OpenAI.FunctionShellCallOutputOutcome", - "azure.ai.projects.models.FunctionShellCallOutputExitOutcome": "OpenAI.FunctionShellCallOutputExitOutcome", "azure.ai.projects.models.FunctionShellCallOutputOutcomeParam": "OpenAI.FunctionShellCallOutputOutcomeParam", "azure.ai.projects.models.FunctionShellCallOutputExitOutcomeParam": "OpenAI.FunctionShellCallOutputExitOutcomeParam", - "azure.ai.projects.models.FunctionShellCallOutputTimeoutOutcome": "OpenAI.FunctionShellCallOutputTimeoutOutcome", "azure.ai.projects.models.FunctionShellCallOutputTimeoutOutcomeParam": "OpenAI.FunctionShellCallOutputTimeoutOutcomeParam", "azure.ai.projects.models.FunctionShellToolParam": "OpenAI.FunctionShellToolParam", "azure.ai.projects.models.FunctionTool": "OpenAI.FunctionTool", @@ -149,7 +138,6 @@ "azure.ai.projects.models.HourlyRecurrenceSchedule": "Azure.AI.Projects.HourlyRecurrenceSchedule", "azure.ai.projects.models.HumanEvaluationRuleAction": "Azure.AI.Projects.HumanEvaluationRuleAction", "azure.ai.projects.models.HybridSearchOptions": "OpenAI.HybridSearchOptions", - "azure.ai.projects.models.ImageBasedHostedAgentDefinition": "Azure.AI.Projects.ImageBasedHostedAgentDefinition", "azure.ai.projects.models.ImageGenTool": "OpenAI.ImageGenTool", "azure.ai.projects.models.ImageGenToolInputImageMask": "OpenAI.ImageGenToolInputImageMask", "azure.ai.projects.models.InputContent": "OpenAI.InputContent", @@ -181,8 +169,6 @@ "azure.ai.projects.models.InputItemOutputMessage": "OpenAI.InputItemOutputMessage", "azure.ai.projects.models.InputItemReasoningItem": "OpenAI.InputItemReasoningItem", "azure.ai.projects.models.InputItemWebSearchToolCall": "OpenAI.InputItemWebSearchToolCall", - "azure.ai.projects.models.ItemResource": "OpenAI.ItemResource", - "azure.ai.projects.models.InputMessageResource": "OpenAI.InputMessageResource", "azure.ai.projects.models.InputTextContentParam": "OpenAI.InputTextContentParam", "azure.ai.projects.models.Insight": "Azure.AI.Projects.Insight", "azure.ai.projects.models.InsightCluster": "Azure.AI.Projects.InsightCluster", @@ -191,25 +177,6 @@ "azure.ai.projects.models.InsightsMetadata": "Azure.AI.Projects.InsightsMetadata", "azure.ai.projects.models.InsightSummary": "Azure.AI.Projects.InsightSummary", "azure.ai.projects.models.ItemReferenceParam": "OpenAI.ItemReferenceParam", - "azure.ai.projects.models.ItemResourceApplyPatchToolCall": "OpenAI.ItemResourceApplyPatchToolCall", - "azure.ai.projects.models.ItemResourceApplyPatchToolCallOutput": "OpenAI.ItemResourceApplyPatchToolCallOutput", - "azure.ai.projects.models.ItemResourceCodeInterpreterToolCall": "OpenAI.ItemResourceCodeInterpreterToolCall", - "azure.ai.projects.models.ItemResourceComputerToolCall": "OpenAI.ItemResourceComputerToolCall", - "azure.ai.projects.models.ItemResourceComputerToolCallOutputResource": "OpenAI.ItemResourceComputerToolCallOutputResource", - "azure.ai.projects.models.ItemResourceFileSearchToolCall": "OpenAI.ItemResourceFileSearchToolCall", - "azure.ai.projects.models.ItemResourceFunctionShellCall": "OpenAI.ItemResourceFunctionShellCall", - "azure.ai.projects.models.ItemResourceFunctionShellCallOutput": "OpenAI.ItemResourceFunctionShellCallOutput", - "azure.ai.projects.models.ItemResourceFunctionToolCallOutputResource": "OpenAI.ItemResourceFunctionToolCallOutputResource", - "azure.ai.projects.models.ItemResourceFunctionToolCallResource": "OpenAI.ItemResourceFunctionToolCallResource", - "azure.ai.projects.models.ItemResourceImageGenToolCall": "OpenAI.ItemResourceImageGenToolCall", - "azure.ai.projects.models.ItemResourceLocalShellToolCall": "OpenAI.ItemResourceLocalShellToolCall", - "azure.ai.projects.models.ItemResourceLocalShellToolCallOutput": "OpenAI.ItemResourceLocalShellToolCallOutput", - "azure.ai.projects.models.ItemResourceMcpApprovalRequest": "OpenAI.ItemResourceMcpApprovalRequest", - "azure.ai.projects.models.ItemResourceMcpApprovalResponseResource": "OpenAI.ItemResourceMcpApprovalResponseResource", - "azure.ai.projects.models.ItemResourceMcpListTools": "OpenAI.ItemResourceMcpListTools", - "azure.ai.projects.models.ItemResourceMcpToolCall": "OpenAI.ItemResourceMcpToolCall", - "azure.ai.projects.models.ItemResourceOutputMessage": "OpenAI.ItemResourceOutputMessage", - "azure.ai.projects.models.ItemResourceWebSearchToolCall": "OpenAI.ItemResourceWebSearchToolCall", "azure.ai.projects.models.KeyPressAction": "OpenAI.KeyPressAction", "azure.ai.projects.models.LocalShellExecAction": "OpenAI.LocalShellExecAction", "azure.ai.projects.models.LocalShellToolParam": "OpenAI.LocalShellToolParam", @@ -225,7 +192,6 @@ "azure.ai.projects.models.MemorySearchItem": "Azure.AI.Projects.MemorySearchItem", "azure.ai.projects.models.MemorySearchOptions": "Azure.AI.Projects.MemorySearchOptions", "azure.ai.projects.models.MemorySearchPreviewTool": "Azure.AI.Projects.MemorySearchPreviewTool", - "azure.ai.projects.models.MemorySearchToolCallItemResource": "Azure.AI.Projects.MemorySearchToolCallItemResource", "azure.ai.projects.models.MemoryStoreDefinition": "Azure.AI.Projects.MemoryStoreDefinition", "azure.ai.projects.models.MemoryStoreDefaultDefinition": "Azure.AI.Projects.MemoryStoreDefaultDefinition", "azure.ai.projects.models.MemoryStoreDefaultOptions": "Azure.AI.Projects.MemoryStoreDefaultOptions", @@ -238,10 +204,10 @@ "azure.ai.projects.models.MicrosoftFabricPreviewTool": "Azure.AI.Projects.MicrosoftFabricPreviewTool", "azure.ai.projects.models.ModelDeployment": "Azure.AI.Projects.ModelDeployment", "azure.ai.projects.models.ModelDeploymentSku": "Azure.AI.Projects.Sku", + "azure.ai.projects.models.ModelSamplingParams": "Azure.AI.Projects.ModelSamplingParams", "azure.ai.projects.models.MonthlyRecurrenceSchedule": "Azure.AI.Projects.MonthlyRecurrenceSchedule", "azure.ai.projects.models.Move": "OpenAI.Move", "azure.ai.projects.models.NoAuthenticationCredentials": "Azure.AI.Projects.NoAuthenticationCredentials", - "azure.ai.projects.models.OAuthConsentRequestItemResource": "Azure.AI.Projects.OAuthConsentRequestItemResource", "azure.ai.projects.models.OneTimeTrigger": "Azure.AI.Projects.OneTimeTrigger", "azure.ai.projects.models.OpenApiAuthDetails": "Azure.AI.Projects.OpenApiAuthDetails", "azure.ai.projects.models.OpenApiAnonymousAuthDetails": "Azure.AI.Projects.OpenApiAnonymousAuthDetails", @@ -252,14 +218,13 @@ "azure.ai.projects.models.OpenApiProjectConnectionAuthDetails": "Azure.AI.Projects.OpenApiProjectConnectionAuthDetails", "azure.ai.projects.models.OpenApiProjectConnectionSecurityScheme": "Azure.AI.Projects.OpenApiProjectConnectionSecurityScheme", "azure.ai.projects.models.OpenApiTool": "Azure.AI.Projects.OpenApiTool", - "azure.ai.projects.models.OutputContent": "OpenAI.OutputContent", "azure.ai.projects.models.OutputMessageContent": "OpenAI.OutputMessageContent", "azure.ai.projects.models.OutputMessageContentOutputTextContent": "OpenAI.OutputMessageContentOutputTextContent", "azure.ai.projects.models.OutputMessageContentRefusalContent": "OpenAI.OutputMessageContentRefusalContent", "azure.ai.projects.models.PendingUploadRequest": "Azure.AI.Projects.PendingUploadRequest", "azure.ai.projects.models.PendingUploadResponse": "Azure.AI.Projects.PendingUploadResponse", "azure.ai.projects.models.PromptAgentDefinition": "Azure.AI.Projects.PromptAgentDefinition", - "azure.ai.projects.models.PromptAgentDefinitionText": "Azure.AI.Projects.PromptAgentDefinition.text.anonymous", + "azure.ai.projects.models.PromptAgentDefinitionTextOptions": "Azure.AI.Projects.PromptAgentDefinitionTextOptions", "azure.ai.projects.models.PromptBasedEvaluatorDefinition": "Azure.AI.Projects.PromptBasedEvaluatorDefinition", "azure.ai.projects.models.ProtocolVersionRecord": "Azure.AI.Projects.ProtocolVersionRecord", "azure.ai.projects.models.RaiConfig": "Azure.AI.Projects.RaiConfig", @@ -277,9 +242,11 @@ "azure.ai.projects.models.Scroll": "OpenAI.Scroll", "azure.ai.projects.models.SharepointGroundingToolParameters": "Azure.AI.Projects.SharepointGroundingToolParameters", "azure.ai.projects.models.SharepointPreviewTool": "Azure.AI.Projects.SharepointPreviewTool", + "azure.ai.projects.models.ToolChoiceParam": "OpenAI.ToolChoiceParam", + "azure.ai.projects.models.SpecificApplyPatchParam": "OpenAI.SpecificApplyPatchParam", + "azure.ai.projects.models.SpecificFunctionShellParam": "OpenAI.SpecificFunctionShellParam", "azure.ai.projects.models.StructuredInputDefinition": "Azure.AI.Projects.StructuredInputDefinition", "azure.ai.projects.models.StructuredOutputDefinition": "Azure.AI.Projects.StructuredOutputDefinition", - "azure.ai.projects.models.StructuredOutputsItemResource": "Azure.AI.Projects.StructuredOutputsItemResource", "azure.ai.projects.models.Summary": "OpenAI.Summary", "azure.ai.projects.models.TaxonomyCategory": "Azure.AI.Projects.TaxonomyCategory", "azure.ai.projects.models.TaxonomySubCategory": "Azure.AI.Projects.TaxonomySubCategory", @@ -287,6 +254,16 @@ "azure.ai.projects.models.TextResponseFormatConfigurationResponseFormatJsonObject": "OpenAI.TextResponseFormatConfigurationResponseFormatJsonObject", "azure.ai.projects.models.TextResponseFormatConfigurationResponseFormatText": "OpenAI.TextResponseFormatConfigurationResponseFormatText", "azure.ai.projects.models.TextResponseFormatJsonSchema": "OpenAI.TextResponseFormatJsonSchema", + "azure.ai.projects.models.ToolChoiceAllowed": "OpenAI.ToolChoiceAllowed", + "azure.ai.projects.models.ToolChoiceCodeInterpreter": "OpenAI.ToolChoiceCodeInterpreter", + "azure.ai.projects.models.ToolChoiceComputerUsePreview": "OpenAI.ToolChoiceComputerUsePreview", + "azure.ai.projects.models.ToolChoiceCustom": "OpenAI.ToolChoiceCustom", + "azure.ai.projects.models.ToolChoiceFileSearch": "OpenAI.ToolChoiceFileSearch", + "azure.ai.projects.models.ToolChoiceFunction": "OpenAI.ToolChoiceFunction", + "azure.ai.projects.models.ToolChoiceImageGeneration": "OpenAI.ToolChoiceImageGeneration", + "azure.ai.projects.models.ToolChoiceMCP": "OpenAI.ToolChoiceMCP", + "azure.ai.projects.models.ToolChoiceWebSearchPreview": "OpenAI.ToolChoiceWebSearchPreview", + "azure.ai.projects.models.ToolChoiceWebSearchPreview20250311": "OpenAI.ToolChoiceWebSearchPreview20250311", "azure.ai.projects.models.ToolDescription": "Azure.AI.Projects.ToolDescription", "azure.ai.projects.models.ToolProjectConnection": "Azure.AI.Projects.ToolProjectConnection", "azure.ai.projects.models.TopLogProb": "OpenAI.TopLogProb", @@ -305,8 +282,8 @@ "azure.ai.projects.models.WebSearchTool": "OpenAI.WebSearchTool", "azure.ai.projects.models.WebSearchToolFilters": "OpenAI.WebSearchToolFilters", "azure.ai.projects.models.WeeklyRecurrenceSchedule": "Azure.AI.Projects.WeeklyRecurrenceSchedule", - "azure.ai.projects.models.WorkflowActionOutputItemResource": "Azure.AI.Projects.WorkflowActionOutputItemResource", "azure.ai.projects.models.WorkflowAgentDefinition": "Azure.AI.Projects.WorkflowAgentDefinition", + "azure.ai.projects.models.AgentObjectType": "Azure.AI.Projects.AgentObjectType", "azure.ai.projects.models.AgentKind": "Azure.AI.Projects.AgentKind", "azure.ai.projects.models.AgentProtocol": "Azure.AI.Projects.AgentProtocol", "azure.ai.projects.models.ToolType": "OpenAI.ToolType", @@ -319,9 +296,30 @@ "azure.ai.projects.models.InputFidelity": "OpenAI.InputFidelity", "azure.ai.projects.models.OpenApiAuthType": "Azure.AI.Projects.OpenApiAuthType", "azure.ai.projects.models.SearchContextSize": "OpenAI.SearchContextSize", + "azure.ai.projects.models.ToolChoiceParamType": "OpenAI.ToolChoiceParamType", "azure.ai.projects.models.TextResponseFormatConfigurationType": "OpenAI.TextResponseFormatConfigurationType", + "azure.ai.projects.models.FoundryFeaturesOptInKeys": "Azure.AI.Projects.FoundryFeaturesOptInKeys", "azure.ai.projects.models.PageOrder": "Azure.AI.Projects.PageOrder", - "azure.ai.projects.models.ContainerLogKind": "Azure.AI.Projects.ContainerLogKind", + "azure.ai.projects.models.ConnectionType": "Azure.AI.Projects.ConnectionType", + "azure.ai.projects.models.CredentialType": "Azure.AI.Projects.CredentialType", + "azure.ai.projects.models.DatasetType": "Azure.AI.Projects.DatasetType", + "azure.ai.projects.models.PendingUploadType": "Azure.AI.Projects.PendingUploadType", + "azure.ai.projects.models.DeploymentType": "Azure.AI.Projects.DeploymentType", + "azure.ai.projects.models.EvaluationTaxonomyInputType": "Azure.AI.Projects.EvaluationTaxonomyInputType", + "azure.ai.projects.models.RiskCategory": "Azure.AI.Projects.RiskCategory", + "azure.ai.projects.models.EvaluationRuleActionType": "Azure.AI.Projects.EvaluationRuleActionType", + "azure.ai.projects.models.EvaluationRuleEventType": "Azure.AI.Projects.EvaluationRuleEventType", + "azure.ai.projects.models.EvaluatorType": "Azure.AI.Projects.EvaluatorType", + "azure.ai.projects.models.EvaluatorCategory": "Azure.AI.Projects.EvaluatorCategory", + "azure.ai.projects.models.EvaluatorDefinitionType": "Azure.AI.Projects.EvaluatorDefinitionType", + "azure.ai.projects.models.EvaluatorMetricType": "Azure.AI.Projects.EvaluatorMetricType", + "azure.ai.projects.models.EvaluatorMetricDirection": "Azure.AI.Projects.EvaluatorMetricDirection", + "azure.ai.projects.models.IndexType": "Azure.AI.Projects.IndexType", + "azure.ai.projects.models.OperationState": "Azure.Core.Foundations.OperationState", + "azure.ai.projects.models.InsightType": "Azure.AI.Projects.InsightType", + "azure.ai.projects.models.SampleType": "Azure.AI.Projects.SampleType", + "azure.ai.projects.models.TreatmentEffectType": "Azure.AI.Projects.TreatmentEffectType", + "azure.ai.projects.models.MemoryStoreObjectType": "Azure.AI.Projects.MemoryStoreObjectType", "azure.ai.projects.models.MemoryStoreKind": "Azure.AI.Projects.MemoryStoreKind", "azure.ai.projects.models.MemoryItemKind": "Azure.AI.Projects.MemoryItemKind", "azure.ai.projects.models.InputItemType": "OpenAI.InputItemType", @@ -338,42 +336,16 @@ "azure.ai.projects.models.InputContentType": "OpenAI.InputContentType", "azure.ai.projects.models.OutputMessageContentType": "OpenAI.OutputMessageContentType", "azure.ai.projects.models.AnnotationType": "OpenAI.AnnotationType", - "azure.ai.projects.models.OutputContentType": "OpenAI.OutputContentType", "azure.ai.projects.models.FunctionShellCallItemStatus": "OpenAI.FunctionShellCallItemStatus", "azure.ai.projects.models.FunctionShellCallOutputOutcomeParamType": "OpenAI.FunctionShellCallOutputOutcomeParamType", "azure.ai.projects.models.MemoryOperationKind": "Azure.AI.Projects.MemoryOperationKind", - "azure.ai.projects.models.ConnectionType": "Azure.AI.Projects.ConnectionType", - "azure.ai.projects.models.CredentialType": "Azure.AI.Projects.CredentialType", - "azure.ai.projects.models.DatasetType": "Azure.AI.Projects.DatasetType", - "azure.ai.projects.models.PendingUploadType": "Azure.AI.Projects.PendingUploadType", - "azure.ai.projects.models.IndexType": "Azure.AI.Projects.IndexType", - "azure.ai.projects.models.DeploymentType": "Azure.AI.Projects.DeploymentType", "azure.ai.projects.models.AttackStrategy": "Azure.AI.Projects.AttackStrategy", - "azure.ai.projects.models.RiskCategory": "Azure.AI.Projects.RiskCategory", - "azure.ai.projects.models.EvaluationRuleActionType": "Azure.AI.Projects.EvaluationRuleActionType", - "azure.ai.projects.models.EvaluationRuleEventType": "Azure.AI.Projects.EvaluationRuleEventType", - "azure.ai.projects.models.EvaluationTaxonomyInputType": "Azure.AI.Projects.EvaluationTaxonomyInputType", - "azure.ai.projects.models.EvaluatorType": "Azure.AI.Projects.EvaluatorType", - "azure.ai.projects.models.EvaluatorCategory": "Azure.AI.Projects.EvaluatorCategory", - "azure.ai.projects.models.EvaluatorDefinitionType": "Azure.AI.Projects.EvaluatorDefinitionType", - "azure.ai.projects.models.EvaluatorMetricType": "Azure.AI.Projects.EvaluatorMetricType", - "azure.ai.projects.models.EvaluatorMetricDirection": "Azure.AI.Projects.EvaluatorMetricDirection", - "azure.ai.projects.models.OperationState": "Azure.Core.Foundations.OperationState", - "azure.ai.projects.models.InsightType": "Azure.AI.Projects.InsightType", - "azure.ai.projects.models.SampleType": "Azure.AI.Projects.SampleType", - "azure.ai.projects.models.TreatmentEffectType": "Azure.AI.Projects.TreatmentEffectType", "azure.ai.projects.models.ScheduleProvisioningStatus": "Azure.AI.Projects.ScheduleProvisioningStatus", "azure.ai.projects.models.TriggerType": "Azure.AI.Projects.TriggerType", "azure.ai.projects.models.RecurrenceType": "Azure.AI.Projects.RecurrenceType", "azure.ai.projects.models.DayOfWeek": "Azure.AI.Projects.DayOfWeek", "azure.ai.projects.models.ScheduleTaskType": "Azure.AI.Projects.ScheduleTaskType", "azure.ai.projects.models.MemoryStoreUpdateStatus": "Azure.AI.Projects.MemoryStoreUpdateStatus", - "azure.ai.projects.models.FunctionShellCallOutputOutcomeType": "OpenAI.FunctionShellCallOutputOutcomeType", - "azure.ai.projects.models.ApplyPatchFileOperationType": "OpenAI.ApplyPatchFileOperationType", - "azure.ai.projects.models.ItemResourceType": "OpenAI.ItemResourceType", - "azure.ai.projects.models.ApplyPatchCallStatus": "OpenAI.ApplyPatchCallStatus", - "azure.ai.projects.models.ApplyPatchCallOutputStatus": "OpenAI.ApplyPatchCallOutputStatus", - "azure.ai.projects.models.LocalShellCallStatus": "OpenAI.LocalShellCallStatus", "azure.ai.projects.operations.AgentsOperations.get": "Azure.AI.Projects.Agents.getAgent", "azure.ai.projects.aio.operations.AgentsOperations.get": "Azure.AI.Projects.Agents.getAgent", "azure.ai.projects.operations.AgentsOperations.create": "Azure.AI.Projects.Agents.createAgent", @@ -398,22 +370,6 @@ "azure.ai.projects.aio.operations.AgentsOperations.delete_version": "Azure.AI.Projects.Agents.deleteAgentVersion", "azure.ai.projects.operations.AgentsOperations.list_versions": "Azure.AI.Projects.Agents.listAgentVersions", "azure.ai.projects.aio.operations.AgentsOperations.list_versions": "Azure.AI.Projects.Agents.listAgentVersions", - "azure.ai.projects.operations.AgentsOperations.stream_agent_container_logs": "Azure.AI.Projects.Agents.streamAgentContainerLogs", - "azure.ai.projects.aio.operations.AgentsOperations.stream_agent_container_logs": "Azure.AI.Projects.Agents.streamAgentContainerLogs", - "azure.ai.projects.operations.MemoryStoresOperations.create": "Azure.AI.Projects.MemoryStores.createMemoryStore", - "azure.ai.projects.aio.operations.MemoryStoresOperations.create": "Azure.AI.Projects.MemoryStores.createMemoryStore", - "azure.ai.projects.operations.MemoryStoresOperations.update": "Azure.AI.Projects.MemoryStores.updateMemoryStore", - "azure.ai.projects.aio.operations.MemoryStoresOperations.update": "Azure.AI.Projects.MemoryStores.updateMemoryStore", - "azure.ai.projects.operations.MemoryStoresOperations.get": "Azure.AI.Projects.MemoryStores.getMemoryStore", - "azure.ai.projects.aio.operations.MemoryStoresOperations.get": "Azure.AI.Projects.MemoryStores.getMemoryStore", - "azure.ai.projects.operations.MemoryStoresOperations.list": "Azure.AI.Projects.MemoryStores.listMemoryStores", - "azure.ai.projects.aio.operations.MemoryStoresOperations.list": "Azure.AI.Projects.MemoryStores.listMemoryStores", - "azure.ai.projects.operations.MemoryStoresOperations.delete": "Azure.AI.Projects.MemoryStores.deleteMemoryStore", - "azure.ai.projects.aio.operations.MemoryStoresOperations.delete": "Azure.AI.Projects.MemoryStores.deleteMemoryStore", - "azure.ai.projects.operations.MemoryStoresOperations.search_memories": "Azure.AI.Projects.MemoryStores.searchMemories", - "azure.ai.projects.aio.operations.MemoryStoresOperations.search_memories": "Azure.AI.Projects.MemoryStores.searchMemories", - "azure.ai.projects.operations.MemoryStoresOperations.delete_scope": "Azure.AI.Projects.MemoryStores.deleteScope", - "azure.ai.projects.aio.operations.MemoryStoresOperations.delete_scope": "Azure.AI.Projects.MemoryStores.deleteScope", "azure.ai.projects.operations.ConnectionsOperations.list": "Azure.AI.Projects.Connections.list", "azure.ai.projects.aio.operations.ConnectionsOperations.list": "Azure.AI.Projects.Connections.list", "azure.ai.projects.operations.DatasetsOperations.list_versions": "Azure.AI.Projects.Datasets.listVersions", @@ -430,34 +386,10 @@ "azure.ai.projects.aio.operations.DatasetsOperations.pending_upload": "Azure.AI.Projects.Datasets.startPendingUploadVersion", "azure.ai.projects.operations.DatasetsOperations.get_credentials": "Azure.AI.Projects.Datasets.getCredentials", "azure.ai.projects.aio.operations.DatasetsOperations.get_credentials": "Azure.AI.Projects.Datasets.getCredentials", - "azure.ai.projects.operations.IndexesOperations.list_versions": "Azure.AI.Projects.Indexes.listVersions", - "azure.ai.projects.aio.operations.IndexesOperations.list_versions": "Azure.AI.Projects.Indexes.listVersions", - "azure.ai.projects.operations.IndexesOperations.list": "Azure.AI.Projects.Indexes.listLatest", - "azure.ai.projects.aio.operations.IndexesOperations.list": "Azure.AI.Projects.Indexes.listLatest", - "azure.ai.projects.operations.IndexesOperations.get": "Azure.AI.Projects.Indexes.getVersion", - "azure.ai.projects.aio.operations.IndexesOperations.get": "Azure.AI.Projects.Indexes.getVersion", - "azure.ai.projects.operations.IndexesOperations.delete": "Azure.AI.Projects.Indexes.deleteVersion", - "azure.ai.projects.aio.operations.IndexesOperations.delete": "Azure.AI.Projects.Indexes.deleteVersion", - "azure.ai.projects.operations.IndexesOperations.create_or_update": "Azure.AI.Projects.Indexes.createOrUpdateVersion", - "azure.ai.projects.aio.operations.IndexesOperations.create_or_update": "Azure.AI.Projects.Indexes.createOrUpdateVersion", "azure.ai.projects.operations.DeploymentsOperations.get": "Azure.AI.Projects.Deployments.get", "azure.ai.projects.aio.operations.DeploymentsOperations.get": "Azure.AI.Projects.Deployments.get", "azure.ai.projects.operations.DeploymentsOperations.list": "Azure.AI.Projects.Deployments.list", "azure.ai.projects.aio.operations.DeploymentsOperations.list": "Azure.AI.Projects.Deployments.list", - "azure.ai.projects.operations.RedTeamsOperations.get": "Azure.AI.Projects.RedTeams.get", - "azure.ai.projects.aio.operations.RedTeamsOperations.get": "Azure.AI.Projects.RedTeams.get", - "azure.ai.projects.operations.RedTeamsOperations.list": "Azure.AI.Projects.RedTeams.list", - "azure.ai.projects.aio.operations.RedTeamsOperations.list": "Azure.AI.Projects.RedTeams.list", - "azure.ai.projects.operations.RedTeamsOperations.create": "Azure.AI.Projects.RedTeams.create", - "azure.ai.projects.aio.operations.RedTeamsOperations.create": "Azure.AI.Projects.RedTeams.create", - "azure.ai.projects.operations.EvaluationRulesOperations.get": "Azure.AI.Projects.EvaluationRules.get", - "azure.ai.projects.aio.operations.EvaluationRulesOperations.get": "Azure.AI.Projects.EvaluationRules.get", - "azure.ai.projects.operations.EvaluationRulesOperations.delete": "Azure.AI.Projects.EvaluationRules.delete", - "azure.ai.projects.aio.operations.EvaluationRulesOperations.delete": "Azure.AI.Projects.EvaluationRules.delete", - "azure.ai.projects.operations.EvaluationRulesOperations.create_or_update": "Azure.AI.Projects.EvaluationRules.createOrUpdate", - "azure.ai.projects.aio.operations.EvaluationRulesOperations.create_or_update": "Azure.AI.Projects.EvaluationRules.createOrUpdate", - "azure.ai.projects.operations.EvaluationRulesOperations.list": "Azure.AI.Projects.EvaluationRules.list", - "azure.ai.projects.aio.operations.EvaluationRulesOperations.list": "Azure.AI.Projects.EvaluationRules.list", "azure.ai.projects.operations.EvaluationTaxonomiesOperations.get": "Azure.AI.Projects.EvaluationTaxonomies.get", "azure.ai.projects.aio.operations.EvaluationTaxonomiesOperations.get": "Azure.AI.Projects.EvaluationTaxonomies.get", "azure.ai.projects.operations.EvaluationTaxonomiesOperations.list": "Azure.AI.Projects.EvaluationTaxonomies.list", @@ -468,6 +400,14 @@ "azure.ai.projects.aio.operations.EvaluationTaxonomiesOperations.create": "Azure.AI.Projects.EvaluationTaxonomies.create", "azure.ai.projects.operations.EvaluationTaxonomiesOperations.update": "Azure.AI.Projects.EvaluationTaxonomies.update", "azure.ai.projects.aio.operations.EvaluationTaxonomiesOperations.update": "Azure.AI.Projects.EvaluationTaxonomies.update", + "azure.ai.projects.operations.EvaluationRulesOperations.get": "Azure.AI.Projects.EvaluationRules.get", + "azure.ai.projects.aio.operations.EvaluationRulesOperations.get": "Azure.AI.Projects.EvaluationRules.get", + "azure.ai.projects.operations.EvaluationRulesOperations.delete": "Azure.AI.Projects.EvaluationRules.delete", + "azure.ai.projects.aio.operations.EvaluationRulesOperations.delete": "Azure.AI.Projects.EvaluationRules.delete", + "azure.ai.projects.operations.EvaluationRulesOperations.create_or_update": "Azure.AI.Projects.EvaluationRules.createOrUpdate", + "azure.ai.projects.aio.operations.EvaluationRulesOperations.create_or_update": "Azure.AI.Projects.EvaluationRules.createOrUpdate", + "azure.ai.projects.operations.EvaluationRulesOperations.list": "Azure.AI.Projects.EvaluationRules.list", + "azure.ai.projects.aio.operations.EvaluationRulesOperations.list": "Azure.AI.Projects.EvaluationRules.list", "azure.ai.projects.operations.EvaluatorsOperations.list_versions": "Azure.AI.Projects.Evaluators.listVersions", "azure.ai.projects.aio.operations.EvaluatorsOperations.list_versions": "Azure.AI.Projects.Evaluators.listVersions", "azure.ai.projects.operations.EvaluatorsOperations.list_latest_versions": "Azure.AI.Projects.Evaluators.listLatestVersions", @@ -480,12 +420,42 @@ "azure.ai.projects.aio.operations.EvaluatorsOperations.create_version": "Azure.AI.Projects.Evaluators.createVersion", "azure.ai.projects.operations.EvaluatorsOperations.update_version": "Azure.AI.Projects.Evaluators.updateVersion", "azure.ai.projects.aio.operations.EvaluatorsOperations.update_version": "Azure.AI.Projects.Evaluators.updateVersion", + "azure.ai.projects.operations.IndexesOperations.list_versions": "Azure.AI.Projects.Indexes.listVersions", + "azure.ai.projects.aio.operations.IndexesOperations.list_versions": "Azure.AI.Projects.Indexes.listVersions", + "azure.ai.projects.operations.IndexesOperations.list": "Azure.AI.Projects.Indexes.listLatest", + "azure.ai.projects.aio.operations.IndexesOperations.list": "Azure.AI.Projects.Indexes.listLatest", + "azure.ai.projects.operations.IndexesOperations.get": "Azure.AI.Projects.Indexes.getVersion", + "azure.ai.projects.aio.operations.IndexesOperations.get": "Azure.AI.Projects.Indexes.getVersion", + "azure.ai.projects.operations.IndexesOperations.delete": "Azure.AI.Projects.Indexes.deleteVersion", + "azure.ai.projects.aio.operations.IndexesOperations.delete": "Azure.AI.Projects.Indexes.deleteVersion", + "azure.ai.projects.operations.IndexesOperations.create_or_update": "Azure.AI.Projects.Indexes.createOrUpdateVersion", + "azure.ai.projects.aio.operations.IndexesOperations.create_or_update": "Azure.AI.Projects.Indexes.createOrUpdateVersion", "azure.ai.projects.operations.InsightsOperations.generate": "Azure.AI.Projects.Insights.generate", "azure.ai.projects.aio.operations.InsightsOperations.generate": "Azure.AI.Projects.Insights.generate", "azure.ai.projects.operations.InsightsOperations.get": "Azure.AI.Projects.Insights.get", "azure.ai.projects.aio.operations.InsightsOperations.get": "Azure.AI.Projects.Insights.get", "azure.ai.projects.operations.InsightsOperations.list": "Azure.AI.Projects.Insights.list", "azure.ai.projects.aio.operations.InsightsOperations.list": "Azure.AI.Projects.Insights.list", + "azure.ai.projects.operations.MemoryStoresOperations.create": "Azure.AI.Projects.MemoryStores.createMemoryStore", + "azure.ai.projects.aio.operations.MemoryStoresOperations.create": "Azure.AI.Projects.MemoryStores.createMemoryStore", + "azure.ai.projects.operations.MemoryStoresOperations.update": "Azure.AI.Projects.MemoryStores.updateMemoryStore", + "azure.ai.projects.aio.operations.MemoryStoresOperations.update": "Azure.AI.Projects.MemoryStores.updateMemoryStore", + "azure.ai.projects.operations.MemoryStoresOperations.get": "Azure.AI.Projects.MemoryStores.getMemoryStore", + "azure.ai.projects.aio.operations.MemoryStoresOperations.get": "Azure.AI.Projects.MemoryStores.getMemoryStore", + "azure.ai.projects.operations.MemoryStoresOperations.list": "Azure.AI.Projects.MemoryStores.listMemoryStores", + "azure.ai.projects.aio.operations.MemoryStoresOperations.list": "Azure.AI.Projects.MemoryStores.listMemoryStores", + "azure.ai.projects.operations.MemoryStoresOperations.delete": "Azure.AI.Projects.MemoryStores.deleteMemoryStore", + "azure.ai.projects.aio.operations.MemoryStoresOperations.delete": "Azure.AI.Projects.MemoryStores.deleteMemoryStore", + "azure.ai.projects.operations.MemoryStoresOperations.search_memories": "Azure.AI.Projects.MemoryStores.searchMemories", + "azure.ai.projects.aio.operations.MemoryStoresOperations.search_memories": "Azure.AI.Projects.MemoryStores.searchMemories", + "azure.ai.projects.operations.MemoryStoresOperations.delete_scope": "Azure.AI.Projects.MemoryStores.deleteScope", + "azure.ai.projects.aio.operations.MemoryStoresOperations.delete_scope": "Azure.AI.Projects.MemoryStores.deleteScope", + "azure.ai.projects.operations.RedTeamsOperations.get": "Azure.AI.Projects.RedTeams.get", + "azure.ai.projects.aio.operations.RedTeamsOperations.get": "Azure.AI.Projects.RedTeams.get", + "azure.ai.projects.operations.RedTeamsOperations.list": "Azure.AI.Projects.RedTeams.list", + "azure.ai.projects.aio.operations.RedTeamsOperations.list": "Azure.AI.Projects.RedTeams.list", + "azure.ai.projects.operations.RedTeamsOperations.create": "Azure.AI.Projects.RedTeams.create", + "azure.ai.projects.aio.operations.RedTeamsOperations.create": "Azure.AI.Projects.RedTeams.create", "azure.ai.projects.operations.SchedulesOperations.delete": "Azure.AI.Projects.Schedules.delete", "azure.ai.projects.aio.operations.SchedulesOperations.delete": "Azure.AI.Projects.Schedules.delete", "azure.ai.projects.operations.SchedulesOperations.get": "Azure.AI.Projects.Schedules.get", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py index 65df15b7b235..db8115d758a2 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py @@ -40,39 +40,38 @@ class AIProjectClient: # pylint: disable=too-many-instance-attributes :ivar agents: AgentsOperations operations :vartype agents: azure.ai.projects.operations.AgentsOperations - :ivar memory_stores: MemoryStoresOperations operations - :vartype memory_stores: azure.ai.projects.operations.MemoryStoresOperations :ivar connections: ConnectionsOperations operations :vartype connections: azure.ai.projects.operations.ConnectionsOperations :ivar datasets: DatasetsOperations operations :vartype datasets: azure.ai.projects.operations.DatasetsOperations - :ivar indexes: IndexesOperations operations - :vartype indexes: azure.ai.projects.operations.IndexesOperations :ivar deployments: DeploymentsOperations operations :vartype deployments: azure.ai.projects.operations.DeploymentsOperations - :ivar red_teams: RedTeamsOperations operations - :vartype red_teams: azure.ai.projects.operations.RedTeamsOperations - :ivar evaluation_rules: EvaluationRulesOperations operations - :vartype evaluation_rules: azure.ai.projects.operations.EvaluationRulesOperations :ivar evaluation_taxonomies: EvaluationTaxonomiesOperations operations :vartype evaluation_taxonomies: azure.ai.projects.operations.EvaluationTaxonomiesOperations + :ivar evaluation_rules: EvaluationRulesOperations operations + :vartype evaluation_rules: azure.ai.projects.operations.EvaluationRulesOperations :ivar evaluators: EvaluatorsOperations operations :vartype evaluators: azure.ai.projects.operations.EvaluatorsOperations + :ivar indexes: IndexesOperations operations + :vartype indexes: azure.ai.projects.operations.IndexesOperations :ivar insights: InsightsOperations operations :vartype insights: azure.ai.projects.operations.InsightsOperations + :ivar memory_stores: MemoryStoresOperations operations + :vartype memory_stores: azure.ai.projects.operations.MemoryStoresOperations + :ivar red_teams: RedTeamsOperations operations + :vartype red_teams: azure.ai.projects.operations.RedTeamsOperations :ivar schedules: SchedulesOperations operations :vartype schedules: azure.ai.projects.operations.SchedulesOperations :param endpoint: Foundry Project endpoint in the form - "https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}". - If you only have one Project in your Foundry Hub, or to target the default Project - in your Hub, use the form - "https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project". Required. + "https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}". If you + only have one Project in your Foundry Hub, or to target the default Project in your Hub, use + the form "https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project". + Required. :type endpoint: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: The API version to use for this operation. Default value is - "2025-11-15-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Default value is "v1". Note + that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ @@ -103,20 +102,20 @@ def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) self._deserialize = Deserializer() self._serialize.client_side_validation = False self.agents = AgentsOperations(self._client, self._config, self._serialize, self._deserialize) - self.memory_stores = MemoryStoresOperations(self._client, self._config, self._serialize, self._deserialize) self.connections = ConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) self.datasets = DatasetsOperations(self._client, self._config, self._serialize, self._deserialize) - self.indexes = IndexesOperations(self._client, self._config, self._serialize, self._deserialize) self.deployments = DeploymentsOperations(self._client, self._config, self._serialize, self._deserialize) - self.red_teams = RedTeamsOperations(self._client, self._config, self._serialize, self._deserialize) - self.evaluation_rules = EvaluationRulesOperations( + self.evaluation_taxonomies = EvaluationTaxonomiesOperations( self._client, self._config, self._serialize, self._deserialize ) - self.evaluation_taxonomies = EvaluationTaxonomiesOperations( + self.evaluation_rules = EvaluationRulesOperations( self._client, self._config, self._serialize, self._deserialize ) self.evaluators = EvaluatorsOperations(self._client, self._config, self._serialize, self._deserialize) + self.indexes = IndexesOperations(self._client, self._config, self._serialize, self._deserialize) self.insights = InsightsOperations(self._client, self._config, self._serialize, self._deserialize) + self.memory_stores = MemoryStoresOperations(self._client, self._config, self._serialize, self._deserialize) + self.red_teams = RedTeamsOperations(self._client, self._config, self._serialize, self._deserialize) self.schedules = SchedulesOperations(self._client, self._config, self._serialize, self._deserialize) def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py index ad3f889051fe..5607f6b685fd 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py @@ -23,21 +23,20 @@ class AIProjectClientConfiguration: # pylint: disable=too-many-instance-attribu attributes. :param endpoint: Foundry Project endpoint in the form - "https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}". - If you only have one Project in your Foundry Hub, or to target the default Project - in your Hub, use the form - "https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project". Required. + "https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}". If you + only have one Project in your Foundry Hub, or to target the default Project in your Hub, use + the form "https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project". + Required. :type endpoint: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: The API version to use for this operation. Default value is - "2025-11-15-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Default value is "v1". Note + that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "2025-11-15-preview") + api_version: str = kwargs.pop("api_version", "v1") if endpoint is None: raise ValueError("Parameter 'endpoint' must not be None.") diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_patch.py index a6902daaafa6..9c478f8f18ca 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_patch.py @@ -10,6 +10,7 @@ import os import logging from typing import List, Any +import httpx from openai import OpenAI from azure.core.tracing.decorator import distributed_trace from azure.core.credentials import TokenCredential @@ -26,43 +27,39 @@ class AIProjectClient(AIProjectClientGenerated): # pylint: disable=too-many-ins :ivar agents: AgentsOperations operations :vartype agents: azure.ai.projects.operations.AgentsOperations - :ivar memory_stores: MemoryStoresOperations operations - :vartype memory_stores: azure.ai.projects.operations.MemoryStoresOperations :ivar connections: ConnectionsOperations operations :vartype connections: azure.ai.projects.operations.ConnectionsOperations :ivar datasets: DatasetsOperations operations :vartype datasets: azure.ai.projects.operations.DatasetsOperations - :ivar indexes: IndexesOperations operations - :vartype indexes: azure.ai.projects.operations.IndexesOperations :ivar deployments: DeploymentsOperations operations :vartype deployments: azure.ai.projects.operations.DeploymentsOperations - :ivar red_teams: RedTeamsOperations operations - :vartype red_teams: azure.ai.projects.operations.RedTeamsOperations - :ivar evaluation_rules: EvaluationRulesOperations operations - :vartype evaluation_rules: azure.ai.projects.operations.EvaluationRulesOperations :ivar evaluation_taxonomies: EvaluationTaxonomiesOperations operations :vartype evaluation_taxonomies: azure.ai.projects.operations.EvaluationTaxonomiesOperations + :ivar evaluation_rules: EvaluationRulesOperations operations + :vartype evaluation_rules: azure.ai.projects.operations.EvaluationRulesOperations :ivar evaluators: EvaluatorsOperations operations :vartype evaluators: azure.ai.projects.operations.EvaluatorsOperations + :ivar indexes: IndexesOperations operations + :vartype indexes: azure.ai.projects.operations.IndexesOperations :ivar insights: InsightsOperations operations :vartype insights: azure.ai.projects.operations.InsightsOperations + :ivar memory_stores: MemoryStoresOperations operations + :vartype memory_stores: azure.ai.projects.operations.MemoryStoresOperations + :ivar red_teams: RedTeamsOperations operations + :vartype red_teams: azure.ai.projects.operations.RedTeamsOperations :ivar schedules: SchedulesOperations operations :vartype schedules: azure.ai.projects.operations.SchedulesOperations :param endpoint: Foundry Project endpoint in the form - ``https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}``. If - you only have one Project in your Foundry Hub, or to target the default Project in your Hub, - use the form - ``https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project``. Required. + "https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}". If you + only have one Project in your Foundry Hub, or to target the default Project in your Hub, use + the form "https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project". + Required. :type endpoint: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: The API version to use for this operation. Default value is - "2025-11-15-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Default value is "v1". Note + that overriding this default value may result in unsupported behavior. :paramtype api_version: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :keyword user_agent: Optional string identifying the caller. This string will show up at the front of the "User-Agent" HTTP request header in all network calls this client makes. If an OpenAI client was obtained by calling get_openai_client(), this string will also show up at the front of the "User-Agent" request header in network calls that OpenAI client makes. """ def __init__(self, endpoint: str, credential: TokenCredential, **kwargs: Any) -> None: @@ -102,8 +99,7 @@ def get_openai_client(self, **kwargs: Any) -> "OpenAI": # type: ignore[name-def The OpenAI client constructor is called with: - * ``base_url`` set to the endpoint provided to the AIProjectClient constructor, with "/openai" appended. - * ``api-version`` set to "2025-05-15-preview" by default, unless overridden by the ``api_version`` keyword argument. + * ``base_url`` set to the endpoint provided to the AIProjectClient constructor, with "/openai/v1" appended. * ``api_key`` set to a get_bearer_token_provider() callable that uses the TokenCredential provided to the AIProjectClient constructor, with scope "https://ai.azure.com/.default". .. note:: The packages ``openai`` and ``azure.identity`` must be installed prior to calling this method. @@ -111,122 +107,24 @@ def get_openai_client(self, **kwargs: Any) -> "OpenAI": # type: ignore[name-def :return: An authenticated OpenAI client :rtype: ~openai.OpenAI - :raises ~azure.core.exceptions.ModuleNotFoundError: if the ``openai`` package - is not installed. :raises ~azure.core.exceptions.HttpResponseError: """ - base_url = self._config.endpoint.rstrip("/") + "/openai" # pylint: disable=protected-access - - if "default_query" not in kwargs: - kwargs["default_query"] = {"api-version": "2025-11-15-preview"} + base_url = self._config.endpoint.rstrip("/") + "/openai/v1" # pylint: disable=protected-access logger.debug( # pylint: disable=specify-parameter-names-in-call "[get_openai_client] Creating OpenAI client using Entra ID authentication, base_url = `%s`", # pylint: disable=line-too-long base_url, ) - http_client = None - kwargs = kwargs.copy() if kwargs else {} - if self._console_logging_enabled: - try: - import httpx - except ModuleNotFoundError as e: - raise ModuleNotFoundError("Failed to import httpx. Please install it using 'pip install httpx'") from e - - class OpenAILoggingTransport(httpx.HTTPTransport): - - def _sanitize_auth_header(self, headers) -> None: - """Sanitize authorization header by redacting sensitive information. - - :param headers: Dictionary of HTTP headers to sanitize - :type headers: dict - """ - - if "authorization" in headers: - auth_value = headers["authorization"] - if len(auth_value) >= 7: - headers["authorization"] = auth_value[:7] + "" - else: - headers["authorization"] = "" - - def handle_request(self, request: httpx.Request) -> httpx.Response: - """ - Log HTTP request and response details to console, in a nicely formatted way, - for OpenAI / Azure OpenAI clients. - - :param request: The HTTP request to handle and log - :type request: httpx.Request - - :return: The HTTP response received - :rtype: httpx.Response - """ - - print(f"\n==> Request:\n{request.method} {request.url}") - headers = dict(request.headers) - self._sanitize_auth_header(headers) - print("Headers:") - for key, value in sorted(headers.items()): - print(f" {key}: {value}") - - self._log_request_body(request) - - response = super().handle_request(request) - - print(f"\n<== Response:\n{response.status_code} {response.reason_phrase}") - print("Headers:") - for key, value in sorted(dict(response.headers).items()): - print(f" {key}: {value}") - - content = response.read() - if content is None or content == b"": - print("Body: [No content]") - else: - try: - print(f"Body:\n {content.decode('utf-8')}") - except Exception: # pylint: disable=broad-exception-caught - print(f"Body (raw):\n {content!r}") - print("\n") - - return response - - def _log_request_body(self, request: httpx.Request) -> None: - """Log request body content safely, handling binary data and streaming content. - - :param request: The HTTP request object containing the body to log - :type request: httpx.Request - """ - - # Check content-type header to identify file uploads - content_type = request.headers.get("content-type", "").lower() - if "multipart/form-data" in content_type: - print("Body: [Multipart form data - file upload, not logged]") - return - - # Safely check if content exists without accessing it - if not hasattr(request, "content"): - print("Body: [No content attribute]") - return - - # Very careful content access - wrap in try-catch immediately - try: - content = request.content - except Exception as access_error: # pylint: disable=broad-exception-caught - print(f"Body: [Cannot access content: {access_error}]") - return - - if content is None or content == b"": - print("Body: [No content]") - return - - try: - print(f"Body:\n {content.decode('utf-8')}") - except Exception: # pylint: disable=broad-exception-caught - print(f"Body (raw):\n {content!r}") - + if "http_client" in kwargs: + http_client = kwargs.pop("http_client") + elif self._console_logging_enabled: http_client = httpx.Client(transport=OpenAILoggingTransport()) + else: + http_client = None default_headers = dict[str, str](kwargs.pop("default_headers", None) or {}) @@ -264,6 +162,107 @@ def _create_openai_client(**kwargs) -> OpenAI: return client +class OpenAILoggingTransport(httpx.HTTPTransport): + """Custom HTTP transport that logs OpenAI API requests and responses to the console. + + This transport wraps httpx.HTTPTransport to intercept all HTTP traffic and print + detailed request/response information for debugging purposes. It automatically + redacts sensitive authorization headers and handles various content types including + multipart form data (file uploads). + + Used internally by AIProjectClient when console logging is enabled via the + AZURE_AI_PROJECTS_CONSOLE_LOGGING environment variable. + """ + + def _sanitize_auth_header(self, headers) -> None: + """Sanitize authorization header by redacting sensitive information. + + :param headers: Dictionary of HTTP headers to sanitize + :type headers: dict + """ + + if "authorization" in headers: + auth_value = headers["authorization"] + if len(auth_value) >= 7: + headers["authorization"] = auth_value[:7] + "" + else: + headers["authorization"] = "" + + def handle_request(self, request: httpx.Request) -> httpx.Response: + """ + Log HTTP request and response details to console, in a nicely formatted way, + for OpenAI / Azure OpenAI clients. + + :param request: The HTTP request to handle and log + :type request: httpx.Request + + :return: The HTTP response received + :rtype: httpx.Response + """ + + print(f"\n==> Request:\n{request.method} {request.url}") + headers = dict(request.headers) + self._sanitize_auth_header(headers) + print("Headers:") + for key, value in sorted(headers.items()): + print(f" {key}: {value}") + + self._log_request_body(request) + + response = super().handle_request(request) + + print(f"\n<== Response:\n{response.status_code} {response.reason_phrase}") + print("Headers:") + for key, value in sorted(dict(response.headers).items()): + print(f" {key}: {value}") + + content = response.read() + if content is None or content == b"": + print("Body: [No content]") + else: + try: + print(f"Body:\n {content.decode('utf-8')}") + except Exception: # pylint: disable=broad-exception-caught + print(f"Body (raw):\n {content!r}") + print("\n") + + return response + + def _log_request_body(self, request: httpx.Request) -> None: + """Log request body content safely, handling binary data and streaming content. + + :param request: The HTTP request object containing the body to log + :type request: httpx.Request + """ + + # Check content-type header to identify file uploads + content_type = request.headers.get("content-type", "").lower() + if "multipart/form-data" in content_type: + print("Body: [Multipart form data - file upload, not logged]") + return + + # Safely check if content exists without accessing it + if not hasattr(request, "content"): + print("Body: [No content attribute]") + return + + # Very careful content access - wrap in try-catch immediately + try: + content = request.content + except Exception as access_error: # pylint: disable=broad-exception-caught + print(f"Body: [Cannot access content: {access_error}]") + return + + if content is None or content == b"": + print("Body: [No content]") + return + + try: + print(f"Body:\n {content.decode('utf-8')}") + except Exception: # pylint: disable=broad-exception-caught + print(f"Body (raw):\n {content!r}") + + __all__: List[str] = [ "AIProjectClient", ] # Add all objects you want publicly available to users at this package level diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py index e0637b7cfdc4..4f7316e3cba1 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -# pylint: disable=protected-access, broad-except, import-error, no-value-for-parameter +# pylint: disable=protected-access, broad-except import copy import calendar @@ -1037,7 +1037,7 @@ def _failsafe_deserialize( ) -> typing.Any: try: return _deserialize(deserializer, response.json(), module, rf, format) - except DeserializationError: + except Exception: # pylint: disable=broad-except _LOGGER.warning( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) @@ -1050,7 +1050,7 @@ def _failsafe_deserialize_xml( ) -> typing.Any: try: return _deserialize_xml(deserializer, response.text()) - except DeserializationError: + except Exception: # pylint: disable=broad-except _LOGGER.warning( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py index 1b918785c3c0..eac98069ec59 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py @@ -40,39 +40,38 @@ class AIProjectClient: # pylint: disable=too-many-instance-attributes :ivar agents: AgentsOperations operations :vartype agents: azure.ai.projects.aio.operations.AgentsOperations - :ivar memory_stores: MemoryStoresOperations operations - :vartype memory_stores: azure.ai.projects.aio.operations.MemoryStoresOperations :ivar connections: ConnectionsOperations operations :vartype connections: azure.ai.projects.aio.operations.ConnectionsOperations :ivar datasets: DatasetsOperations operations :vartype datasets: azure.ai.projects.aio.operations.DatasetsOperations - :ivar indexes: IndexesOperations operations - :vartype indexes: azure.ai.projects.aio.operations.IndexesOperations :ivar deployments: DeploymentsOperations operations :vartype deployments: azure.ai.projects.aio.operations.DeploymentsOperations - :ivar red_teams: RedTeamsOperations operations - :vartype red_teams: azure.ai.projects.aio.operations.RedTeamsOperations - :ivar evaluation_rules: EvaluationRulesOperations operations - :vartype evaluation_rules: azure.ai.projects.aio.operations.EvaluationRulesOperations :ivar evaluation_taxonomies: EvaluationTaxonomiesOperations operations :vartype evaluation_taxonomies: azure.ai.projects.aio.operations.EvaluationTaxonomiesOperations + :ivar evaluation_rules: EvaluationRulesOperations operations + :vartype evaluation_rules: azure.ai.projects.aio.operations.EvaluationRulesOperations :ivar evaluators: EvaluatorsOperations operations :vartype evaluators: azure.ai.projects.aio.operations.EvaluatorsOperations + :ivar indexes: IndexesOperations operations + :vartype indexes: azure.ai.projects.aio.operations.IndexesOperations :ivar insights: InsightsOperations operations :vartype insights: azure.ai.projects.aio.operations.InsightsOperations + :ivar memory_stores: MemoryStoresOperations operations + :vartype memory_stores: azure.ai.projects.aio.operations.MemoryStoresOperations + :ivar red_teams: RedTeamsOperations operations + :vartype red_teams: azure.ai.projects.aio.operations.RedTeamsOperations :ivar schedules: SchedulesOperations operations :vartype schedules: azure.ai.projects.aio.operations.SchedulesOperations :param endpoint: Foundry Project endpoint in the form - "https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}". - If you only have one Project in your Foundry Hub, or to target the default Project - in your Hub, use the form - "https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project". Required. + "https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}". If you + only have one Project in your Foundry Hub, or to target the default Project in your Hub, use + the form "https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project". + Required. :type endpoint: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: The API version to use for this operation. Default value is - "2025-11-15-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Default value is "v1". Note + that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ @@ -103,20 +102,20 @@ def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: self._deserialize = Deserializer() self._serialize.client_side_validation = False self.agents = AgentsOperations(self._client, self._config, self._serialize, self._deserialize) - self.memory_stores = MemoryStoresOperations(self._client, self._config, self._serialize, self._deserialize) self.connections = ConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) self.datasets = DatasetsOperations(self._client, self._config, self._serialize, self._deserialize) - self.indexes = IndexesOperations(self._client, self._config, self._serialize, self._deserialize) self.deployments = DeploymentsOperations(self._client, self._config, self._serialize, self._deserialize) - self.red_teams = RedTeamsOperations(self._client, self._config, self._serialize, self._deserialize) - self.evaluation_rules = EvaluationRulesOperations( + self.evaluation_taxonomies = EvaluationTaxonomiesOperations( self._client, self._config, self._serialize, self._deserialize ) - self.evaluation_taxonomies = EvaluationTaxonomiesOperations( + self.evaluation_rules = EvaluationRulesOperations( self._client, self._config, self._serialize, self._deserialize ) self.evaluators = EvaluatorsOperations(self._client, self._config, self._serialize, self._deserialize) + self.indexes = IndexesOperations(self._client, self._config, self._serialize, self._deserialize) self.insights = InsightsOperations(self._client, self._config, self._serialize, self._deserialize) + self.memory_stores = MemoryStoresOperations(self._client, self._config, self._serialize, self._deserialize) + self.red_teams = RedTeamsOperations(self._client, self._config, self._serialize, self._deserialize) self.schedules = SchedulesOperations(self._client, self._config, self._serialize, self._deserialize) def send_request( diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py index 39ed534eb174..91e944b3bbae 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py @@ -23,21 +23,20 @@ class AIProjectClientConfiguration: # pylint: disable=too-many-instance-attribu attributes. :param endpoint: Foundry Project endpoint in the form - "https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}". - If you only have one Project in your Foundry Hub, or to target the default Project - in your Hub, use the form - "https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project". Required. + "https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}". If you + only have one Project in your Foundry Hub, or to target the default Project in your Hub, use + the form "https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project". + Required. :type endpoint: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: The API version to use for this operation. Default value is - "2025-11-15-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Default value is "v1". Note + that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "2025-11-15-preview") + api_version: str = kwargs.pop("api_version", "v1") if endpoint is None: raise ValueError("Parameter 'endpoint' must not be None.") diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_patch.py index 4a9f2d6ca489..071c2478befa 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_patch.py @@ -10,6 +10,7 @@ import os import logging from typing import List, Any +import httpx from openai import AsyncOpenAI from azure.core.tracing.decorator import distributed_trace from azure.core.credentials_async import AsyncTokenCredential @@ -26,44 +27,39 @@ class AIProjectClient(AIProjectClientGenerated): # pylint: disable=too-many-ins :ivar agents: AgentsOperations operations :vartype agents: azure.ai.projects.aio.operations.AgentsOperations - :ivar memory_stores: MemoryStoresOperations operations - :vartype memory_stores: azure.ai.projects.aio.operations.MemoryStoresOperations :ivar connections: ConnectionsOperations operations :vartype connections: azure.ai.projects.aio.operations.ConnectionsOperations :ivar datasets: DatasetsOperations operations :vartype datasets: azure.ai.projects.aio.operations.DatasetsOperations - :ivar indexes: IndexesOperations operations - :vartype indexes: azure.ai.projects.aio.operations.IndexesOperations :ivar deployments: DeploymentsOperations operations :vartype deployments: azure.ai.projects.aio.operations.DeploymentsOperations - :ivar red_teams: RedTeamsOperations operations - :vartype red_teams: azure.ai.projects.aio.operations.RedTeamsOperations - :ivar evaluation_rules: EvaluationRulesOperations operations - :vartype evaluation_rules: azure.ai.projects.aio.operations.EvaluationRulesOperations :ivar evaluation_taxonomies: EvaluationTaxonomiesOperations operations :vartype evaluation_taxonomies: azure.ai.projects.aio.operations.EvaluationTaxonomiesOperations + :ivar evaluation_rules: EvaluationRulesOperations operations + :vartype evaluation_rules: azure.ai.projects.aio.operations.EvaluationRulesOperations :ivar evaluators: EvaluatorsOperations operations :vartype evaluators: azure.ai.projects.aio.operations.EvaluatorsOperations + :ivar indexes: IndexesOperations operations + :vartype indexes: azure.ai.projects.aio.operations.IndexesOperations :ivar insights: InsightsOperations operations :vartype insights: azure.ai.projects.aio.operations.InsightsOperations + :ivar memory_stores: MemoryStoresOperations operations + :vartype memory_stores: azure.ai.projects.aio.operations.MemoryStoresOperations + :ivar red_teams: RedTeamsOperations operations + :vartype red_teams: azure.ai.projects.aio.operations.RedTeamsOperations :ivar schedules: SchedulesOperations operations :vartype schedules: azure.ai.projects.aio.operations.SchedulesOperations :param endpoint: Foundry Project endpoint in the form - ``https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}``. If - you only have one Project in your Foundry Hub, or to target the default Project in your Hub, - use the form - ``https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project``. Required. + "https://{ai-services-account-name}.services.ai.azure.com/api/projects/{project-name}". If you + only have one Project in your Foundry Hub, or to target the default Project in your Hub, use + the form "https://{ai-services-account-name}.services.ai.azure.com/api/projects/_project". + Required. :type endpoint: str :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: The API version to use for this operation. Default value is - "2025-11-15-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Default value is "v1". Note + that overriding this default value may result in unsupported behavior. :paramtype api_version: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :keyword user_agent: Optional string identifying the caller. This string will show up at the front of the "User-Agent" HTTP request header in all network calls this client makes. If an OpenAI client was obtained by calling get_openai_client(), this string will also show up at the front of the "User-Agent" request header in network calls that OpenAI client makes. - :meth:`get_openai_client`. """ def __init__(self, endpoint: str, credential: AsyncTokenCredential, **kwargs: Any) -> None: @@ -103,8 +99,7 @@ def get_openai_client(self, **kwargs: Any) -> "AsyncOpenAI": # type: ignore[nam The AsyncOpenAI client constructor is called with: - * ``base_url`` set to the endpoint provided to the AIProjectClient constructor, with "/openai" appended. - * ``api-version`` set to "2025-05-15-preview" by default, unless overridden by the ``api_version`` keyword argument. + * ``base_url`` set to the endpoint provided to the AIProjectClient constructor, with "/openai/v1" appended. * ``api_key`` set to a get_bearer_token_provider() callable that uses the TokenCredential provided to the AIProjectClient constructor, with scope "https://ai.azure.com/.default". .. note:: The packages ``openai`` and ``azure.identity`` must be installed prior to calling this method. @@ -112,122 +107,24 @@ def get_openai_client(self, **kwargs: Any) -> "AsyncOpenAI": # type: ignore[nam :return: An authenticated AsyncOpenAI client :rtype: ~openai.AsyncOpenAI - :raises ~azure.core.exceptions.ModuleNotFoundError: if the ``openai`` package - is not installed. :raises ~azure.core.exceptions.HttpResponseError: """ - base_url = self._config.endpoint.rstrip("/") + "/openai" # pylint: disable=protected-access - - if "default_query" not in kwargs: - kwargs["default_query"] = {"api-version": "2025-11-15-preview"} + base_url = self._config.endpoint.rstrip("/") + "/openai/v1" # pylint: disable=protected-access logger.debug( # pylint: disable=specify-parameter-names-in-call "[get_openai_client] Creating OpenAI client using Entra ID authentication, base_url = `%s`", # pylint: disable=line-too-long base_url, ) - http_client = None - kwargs = kwargs.copy() if kwargs else {} - if self._console_logging_enabled: - try: - import httpx - except ModuleNotFoundError as e: - raise ModuleNotFoundError("Failed to import httpx. Please install it using 'pip install httpx'") from e - - class OpenAILoggingTransport(httpx.AsyncHTTPTransport): - - def _sanitize_auth_header(self, headers): - """Sanitize authorization header by redacting sensitive information. - - :param headers: Dictionary of HTTP headers to sanitize - :type headers: dict - """ - - if "authorization" in headers: - auth_value = headers["authorization"] - if len(auth_value) >= 7: - headers["authorization"] = auth_value[:7] + "" - else: - headers["authorization"] = "" - - async def handle_async_request(self, request: httpx.Request) -> httpx.Response: - """ - Log HTTP request and response details to console, in a nicely formatted way, - for OpenAI / Azure OpenAI clients. - - :param request: The HTTP request to handle and log - :type request: httpx.Request - - :return: The HTTP response received - :rtype: httpx.Response - """ - - print(f"\n==> Request:\n{request.method} {request.url}") - headers = dict(request.headers) - self._sanitize_auth_header(headers) - print("Headers:") - for key, value in sorted(headers.items()): - print(f" {key}: {value}") - - self._log_request_body(request) - - response = await super().handle_async_request(request) - - print(f"\n<== Response:\n{response.status_code} {response.reason_phrase}") - print("Headers:") - for key, value in sorted(dict(response.headers).items()): - print(f" {key}: {value}") - - content = await response.aread() - if content is None or content == b"": - print("Body: [No content]") - else: - try: - print(f"Body:\n {content.decode('utf-8')}") - except Exception: # pylint: disable=broad-exception-caught - print(f"Body (raw):\n {content!r}") - print("\n") - - return response - - def _log_request_body(self, request: httpx.Request) -> None: - """Log request body content safely, handling binary data and streaming content. - - :param request: The HTTP request object containing the body to log - :type request: httpx.Request - """ - - # Check content-type header to identify file uploads - content_type = request.headers.get("content-type", "").lower() - if "multipart/form-data" in content_type: - print("Body: [Multipart form data - file upload, not logged]") - return - - # Safely check if content exists without accessing it - if not hasattr(request, "content"): - print("Body: [No content attribute]") - return - - # Very careful content access - wrap in try-catch immediately - try: - content = request.content - except Exception as access_error: # pylint: disable=broad-exception-caught - print(f"Body: [Cannot access content: {access_error}]") - return - - if content is None or content == b"": - print("Body: [No content]") - return - - try: - print(f"Body:\n {content.decode('utf-8')}") - except Exception: # pylint: disable=broad-exception-caught - print(f"Body (raw):\n {content!r}") - + if "http_client" in kwargs: + http_client = kwargs.pop("http_client") + elif self._console_logging_enabled: http_client = httpx.AsyncClient(transport=OpenAILoggingTransport()) + else: + http_client = None default_headers = dict[str, str](kwargs.pop("default_headers", None) or {}) @@ -265,6 +162,107 @@ def _create_openai_client(**kwargs) -> AsyncOpenAI: return client +class OpenAILoggingTransport(httpx.AsyncHTTPTransport): + """Custom HTTP async transport that logs OpenAI API requests and responses to the console. + + This transport wraps httpx.AsyncHTTPTransport to intercept all HTTP traffic and print + detailed request/response information for debugging purposes. It automatically + redacts sensitive authorization headers and handles various content types including + multipart form data (file uploads). + + Used internally by AIProjectClient when console logging is enabled via the + AZURE_AI_PROJECTS_CONSOLE_LOGGING environment variable. + """ + + def _sanitize_auth_header(self, headers): + """Sanitize authorization header by redacting sensitive information. + + :param headers: Dictionary of HTTP headers to sanitize + :type headers: dict + """ + + if "authorization" in headers: + auth_value = headers["authorization"] + if len(auth_value) >= 7: + headers["authorization"] = auth_value[:7] + "" + else: + headers["authorization"] = "" + + async def handle_async_request(self, request: httpx.Request) -> httpx.Response: + """ + Log HTTP request and response details to console, in a nicely formatted way, + for OpenAI / Azure OpenAI clients. + + :param request: The HTTP request to handle and log + :type request: httpx.Request + + :return: The HTTP response received + :rtype: httpx.Response + """ + + print(f"\n==> Request:\n{request.method} {request.url}") + headers = dict(request.headers) + self._sanitize_auth_header(headers) + print("Headers:") + for key, value in sorted(headers.items()): + print(f" {key}: {value}") + + self._log_request_body(request) + + response = await super().handle_async_request(request) + + print(f"\n<== Response:\n{response.status_code} {response.reason_phrase}") + print("Headers:") + for key, value in sorted(dict(response.headers).items()): + print(f" {key}: {value}") + + content = await response.aread() + if content is None or content == b"": + print("Body: [No content]") + else: + try: + print(f"Body:\n {content.decode('utf-8')}") + except Exception: # pylint: disable=broad-exception-caught + print(f"Body (raw):\n {content!r}") + print("\n") + + return response + + def _log_request_body(self, request: httpx.Request) -> None: + """Log request body content safely, handling binary data and streaming content. + + :param request: The HTTP request object containing the body to log + :type request: httpx.Request + """ + + # Check content-type header to identify file uploads + content_type = request.headers.get("content-type", "").lower() + if "multipart/form-data" in content_type: + print("Body: [Multipart form data - file upload, not logged]") + return + + # Safely check if content exists without accessing it + if not hasattr(request, "content"): + print("Body: [No content attribute]") + return + + # Very careful content access - wrap in try-catch immediately + try: + content = request.content + except Exception as access_error: # pylint: disable=broad-exception-caught + print(f"Body: [Cannot access content: {access_error}]") + return + + if content is None or content == b"": + print("Body: [No content]") + return + + try: + print(f"Body:\n {content.decode('utf-8')}") + except Exception: # pylint: disable=broad-exception-caught + print(f"Body (raw):\n {content!r}") + + __all__: List[str] = ["AIProjectClient"] # Add all objects you want publicly available to users at this package level diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/__init__.py index 5ae1225f30fa..7c53165b9f1d 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/__init__.py @@ -13,16 +13,16 @@ from ._patch import * # pylint: disable=unused-wildcard-import from ._operations import AgentsOperations # type: ignore -from ._operations import MemoryStoresOperations # type: ignore from ._operations import ConnectionsOperations # type: ignore from ._operations import DatasetsOperations # type: ignore -from ._operations import IndexesOperations # type: ignore from ._operations import DeploymentsOperations # type: ignore -from ._operations import RedTeamsOperations # type: ignore -from ._operations import EvaluationRulesOperations # type: ignore from ._operations import EvaluationTaxonomiesOperations # type: ignore +from ._operations import EvaluationRulesOperations # type: ignore from ._operations import EvaluatorsOperations # type: ignore +from ._operations import IndexesOperations # type: ignore from ._operations import InsightsOperations # type: ignore +from ._operations import MemoryStoresOperations # type: ignore +from ._operations import RedTeamsOperations # type: ignore from ._operations import SchedulesOperations # type: ignore from ._patch import __all__ as _patch_all @@ -31,16 +31,16 @@ __all__ = [ "AgentsOperations", - "MemoryStoresOperations", "ConnectionsOperations", "DatasetsOperations", - "IndexesOperations", "DeploymentsOperations", - "RedTeamsOperations", - "EvaluationRulesOperations", "EvaluationTaxonomiesOperations", + "EvaluationRulesOperations", "EvaluatorsOperations", + "IndexesOperations", "InsightsOperations", + "MemoryStoresOperations", + "RedTeamsOperations", "SchedulesOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py index c5bbefb67fe9..9fdb00021cb8 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -35,6 +35,7 @@ from ... import models as _models from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize from ..._utils.serialization import Deserializer, Serializer +from ...models._enums import FoundryFeaturesOptInKeys from ...operations._operations import ( build_agents_create_from_manifest_request, build_agents_create_request, @@ -46,7 +47,6 @@ build_agents_get_version_request, build_agents_list_request, build_agents_list_versions_request, - build_agents_stream_agent_container_logs_request, build_agents_update_from_manifest_request, build_agents_update_request, build_connections_get_request, @@ -198,6 +198,13 @@ async def create( *, name: str, definition: _models.AgentDefinition, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, content_type: str = "application/json", metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, @@ -215,6 +222,14 @@ async def create( :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -234,12 +249,31 @@ async def create( @overload async def create( - self, body: JSON, *, content_type: str = "application/json", **kwargs: Any + self, + body: JSON, + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any ) -> _models.AgentDetails: """Creates the agent. :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -250,12 +284,31 @@ async def create( @overload async def create( - self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + self, + body: IO[bytes], + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any ) -> _models.AgentDetails: """Creates the agent. :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -271,6 +324,13 @@ async def create( *, name: str = _Unset, definition: _models.AgentDefinition = _Unset, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, **kwargs: Any @@ -289,6 +349,14 @@ async def create( :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. @@ -331,6 +399,7 @@ async def create( _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore _request = build_agents_create_request( + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -378,19 +447,34 @@ async def update( agent_name: str, *, definition: _models.AgentDefinition, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, content_type: str = "application/json", metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, **kwargs: Any ) -> _models.AgentDetails: - """Updates the agent by adding a new version if there are any changes to the agent definition. - If no changes, returns the existing agent version. + """Updates the agent by adding a new version if there are any changes to the agent definition. If + no changes, returns the existing agent version. :param agent_name: The name of the agent to retrieve. Required. :type agent_name: str :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -410,15 +494,35 @@ async def update( @overload async def update( - self, agent_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + self, + agent_name: str, + body: JSON, + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any ) -> _models.AgentDetails: - """Updates the agent by adding a new version if there are any changes to the agent definition. - If no changes, returns the existing agent version. + """Updates the agent by adding a new version if there are any changes to the agent definition. If + no changes, returns the existing agent version. :param agent_name: The name of the agent to retrieve. Required. :type agent_name: str :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -429,15 +533,35 @@ async def update( @overload async def update( - self, agent_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + self, + agent_name: str, + body: IO[bytes], + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any ) -> _models.AgentDetails: - """Updates the agent by adding a new version if there are any changes to the agent definition. - If no changes, returns the existing agent version. + """Updates the agent by adding a new version if there are any changes to the agent definition. If + no changes, returns the existing agent version. :param agent_name: The name of the agent to retrieve. Required. :type agent_name: str :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -453,12 +577,19 @@ async def update( body: Union[JSON, IO[bytes]] = _Unset, *, definition: _models.AgentDefinition = _Unset, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, **kwargs: Any ) -> _models.AgentDetails: - """Updates the agent by adding a new version if there are any changes to the agent definition. - If no changes, returns the existing agent version. + """Updates the agent by adding a new version if there are any changes to the agent definition. If + no changes, returns the existing agent version. :param agent_name: The name of the agent to retrieve. Required. :type agent_name: str @@ -467,6 +598,14 @@ async def update( :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. @@ -508,6 +647,7 @@ async def update( _request = build_agents_update_request( agent_name=agent_name, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -756,8 +896,7 @@ async def update_from_manifest( **kwargs: Any ) -> _models.AgentDetails: """Updates the agent from a manifest by adding a new version if there are any changes to the agent - definition. - If no changes, returns the existing agent version. + definition. If no changes, returns the existing agent version. :param agent_name: The name of the agent to update. Required. :type agent_name: str @@ -788,8 +927,7 @@ async def update_from_manifest( self, agent_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any ) -> _models.AgentDetails: """Updates the agent from a manifest by adding a new version if there are any changes to the agent - definition. - If no changes, returns the existing agent version. + definition. If no changes, returns the existing agent version. :param agent_name: The name of the agent to update. Required. :type agent_name: str @@ -808,8 +946,7 @@ async def update_from_manifest( self, agent_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> _models.AgentDetails: """Updates the agent from a manifest by adding a new version if there are any changes to the agent - definition. - If no changes, returns the existing agent version. + definition. If no changes, returns the existing agent version. :param agent_name: The name of the agent to update. Required. :type agent_name: str @@ -836,8 +973,7 @@ async def update_from_manifest( **kwargs: Any ) -> _models.AgentDetails: """Updates the agent from a manifest by adding a new version if there are any changes to the agent - definition. - If no changes, returns the existing agent version. + definition. If no changes, returns the existing agent version. :param agent_name: The name of the agent to update. Required. :type agent_name: str @@ -1099,6 +1235,13 @@ async def create_version( agent_name: str, *, definition: _models.AgentDefinition, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, content_type: str = "application/json", metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, @@ -1116,6 +1259,14 @@ async def create_version( :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -1135,7 +1286,19 @@ async def create_version( @overload async def create_version( - self, agent_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + self, + agent_name: str, + body: JSON, + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any ) -> _models.AgentVersionDetails: """Create a new agent version. @@ -1148,6 +1311,14 @@ async def create_version( :type agent_name: str :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -1158,7 +1329,19 @@ async def create_version( @overload async def create_version( - self, agent_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + self, + agent_name: str, + body: IO[bytes], + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any ) -> _models.AgentVersionDetails: """Create a new agent version. @@ -1171,6 +1354,14 @@ async def create_version( :type agent_name: str :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -1186,6 +1377,13 @@ async def create_version( body: Union[JSON, IO[bytes]] = _Unset, *, definition: _models.AgentDefinition = _Unset, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, **kwargs: Any @@ -1204,6 +1402,14 @@ async def create_version( :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. @@ -1245,6 +1451,7 @@ async def create_version( _request = build_agents_create_version_request( agent_name=agent_name, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -1720,58 +1927,32 @@ async def get_next(_continuation_token=None): return AsyncItemPaged(get_next, extract_data) + +class ConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.aio.AIProjectClient`'s + :attr:`connections` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace_async - async def stream_agent_container_logs( - self, - agent_name: str, - agent_version: str, - *, - kind: Optional[Union[str, _models.ContainerLogKind]] = None, - replica_name: Optional[str] = None, - tail: Optional[int] = None, - **kwargs: Any - ) -> None: - """Container log entry streamed from the container as text chunks. - Each chunk is a UTF-8 string that may be either a plain text log line - or a JSON-formatted log entry, depending on the type of container log being streamed. - Clients should treat each chunk as opaque text and, if needed, attempt - to parse it as JSON based on their logging requirements. - - For system logs, the format is JSON with the following structure: - {"TimeStamp":"2025-12-15T16:51:33Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Connecting - to the events - collector...","Reason":"StartingGettingEvents","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2025-12-15T16:51:34Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Successfully - connected to events - server","Reason":"ConnectedToEventsServer","EventSource":"ContainerAppController","Count":1} - - For console logs, the format is plain text as emitted by the container's stdout/stderr. - 2025-12-15T08:43:48.72656 Connecting to the container 'agent-container'... - 2025-12-15T08:43:48.75451 Successfully Connected to container: 'agent-container' [Revision: - 'je90fe655aa742ef9a188b9fd14d6764--7tca06b', Replica: - 'je90fe655aa742ef9a188b9fd14d6764--7tca06b-6898b9c89f-mpkjc'] - 2025-12-15T08:33:59.0671054Z stdout F INFO: 127.0.0.1:42588 - "GET /readiness HTTP/1.1" 200 - OK - 2025-12-15T08:34:29.0649033Z stdout F INFO: 127.0.0.1:60246 - "GET /readiness HTTP/1.1" 200 - OK - 2025-12-15T08:34:59.0644467Z stdout F INFO: 127.0.0.1:43994 - "GET /readiness HTTP/1.1" 200 - OK. - - :param agent_name: The name of the agent. Required. - :type agent_name: str - :param agent_version: The version of the agent. Required. - :type agent_version: str - :keyword kind: console returns container stdout/stderr, system returns container app event - stream. defaults to console. Known values are: "console" and "system". Default value is None. - :paramtype kind: str or ~azure.ai.projects.models.ContainerLogKind - :keyword replica_name: When omitted, the server chooses the first replica for console logs. - Required to target a specific replica. Default value is None. - :paramtype replica_name: str - :keyword tail: Number of trailing lines returned. Enforced to 1-300. Defaults to 20. Default - value is None. - :paramtype tail: int - :return: None - :rtype: None + async def _get(self, name: str, **kwargs: Any) -> _models.Connection: + """Get a connection by name, without populating connection credentials. + + :param name: The friendly name of the connection, provided by the user. Required. + :type name: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Connection :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -1785,14 +1966,10 @@ async def stream_agent_container_logs( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) - _request = build_agents_stream_agent_container_logs_request( - agent_name=agent_name, - agent_version=agent_version, - kind=kind, - replica_name=replica_name, - tail=tail, + _request = build_connections_get_request( + name=name, api_version=self._config.api_version, headers=_headers, params=_params, @@ -1802,7 +1979,7 @@ async def stream_agent_container_logs( } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1810,122 +1987,37 @@ async def stream_agent_container_logs( response = pipeline_response.http_response if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - -class MemoryStoresOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.aio.AIProjectClient`'s - :attr:`memory_stores` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - async def create( - self, - *, - name: str, - definition: _models.MemoryStoreDefinition, - content_type: str = "application/json", - description: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, - **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Create a memory store. - - :keyword name: The name of the memory store. Required. - :paramtype name: str - :keyword definition: The memory store definition. Required. - :paramtype definition: ~azure.ai.projects.models.MemoryStoreDefinition - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword description: A human-readable description of the memory store. Default value is None. - :paramtype description: str - :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default - value is None. - :paramtype metadata: dict[str, str] - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ + raise HttpResponseError(response=response) - @overload - async def create( - self, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Create a memory store. + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) - :param body: Required. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Connection, response.json()) - @overload - async def create( - self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Create a memory store. + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore - :param body: Required. - :type body: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ + return deserialized # type: ignore @distributed_trace_async - async def create( - self, - body: Union[JSON, IO[bytes]] = _Unset, - *, - name: str = _Unset, - definition: _models.MemoryStoreDefinition = _Unset, - description: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, - **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Create a memory store. + async def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: + """Get a connection by name, with its connection credentials. - :param body: Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword name: The name of the memory store. Required. - :paramtype name: str - :keyword definition: The memory store definition. Required. - :paramtype definition: ~azure.ai.projects.models.MemoryStoreDefinition - :keyword description: A human-readable description of the memory store. Default value is None. - :paramtype description: str - :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default - value is None. - :paramtype metadata: dict[str, str] - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails + :param name: The friendly name of the connection, provided by the user. Required. + :type name: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Connection :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -1936,30 +2028,14 @@ async def create( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) - - if body is _Unset: - if name is _Unset: - raise TypeError("missing required argument: name") - if definition is _Unset: - raise TypeError("missing required argument: definition") - body = {"definition": definition, "description": description, "metadata": metadata, "name": name} - body = {k: v for k, v in body.items() if v is not None} - content_type = content_type or "application/json" - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) - _request = build_memory_stores_create_request( - content_type=content_type, + _request = build_connections_get_with_credentials_request( + name=name, api_version=self._config.api_version, - content=_content, headers=_headers, params=_params, ) @@ -1982,110 +2058,49 @@ async def create( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) + deserialized = _deserialize(_models.Connection, response.json()) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - @overload - async def update( + @distributed_trace + def list( self, - name: str, *, - content_type: str = "application/json", - description: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, - **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Update a memory store. - - :param name: The name of the memory store to update. Required. - :type name: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword description: A human-readable description of the memory store. Default value is None. - :paramtype description: str - :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default - value is None. - :paramtype metadata: dict[str, str] - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def update( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Update a memory store. - - :param name: The name of the memory store to update. Required. - :type name: str - :param body: Required. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def update( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Update a memory store. + connection_type: Optional[Union[str, _models.ConnectionType]] = None, + default_connection: Optional[bool] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.Connection"]: + """List all connections in the project, without populating connection credentials. - :param name: The name of the memory store to update. Required. - :type name: str - :param body: Required. - :type body: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails + :keyword connection_type: List connections of this specific type. Known values are: + "AzureOpenAI", "AzureBlob", "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", + "AppConfig", "AppInsights", "CustomKeys", and "RemoteTool_Preview". Default value is None. + :paramtype connection_type: str or ~azure.ai.projects.models.ConnectionType + :keyword default_connection: List connections that are default connections. Default value is + None. + :paramtype default_connection: bool + :return: An iterator like instance of Connection + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.Connection] :raises ~azure.core.exceptions.HttpResponseError: """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - @distributed_trace_async - async def update( - self, - name: str, - body: Union[JSON, IO[bytes]] = _Unset, - *, - description: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, - **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Update a memory store. + cls: ClsType[List[_models.Connection]] = kwargs.pop("cls", None) - :param name: The name of the memory store to update. Required. - :type name: str - :param body: Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword description: A human-readable description of the memory store. Default value is None. - :paramtype description: str - :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default - value is None. - :paramtype metadata: dict[str, str] - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2094,75 +2109,102 @@ async def update( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} + def prepare_request(next_link=None): + if not next_link: - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) + _request = build_connections_list_request( + connection_type=connection_type, + default_connection=default_connection, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - if body is _Unset: - body = {"description": description, "metadata": metadata} - body = {k: v for k, v in body.items() if v is not None} - content_type = content_type or "application/json" - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _request = build_memory_stores_update_request( - name=name, - content_type=content_type, - api_version=self._config.api_version, - content=_content, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + return _request - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.Connection], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) - response = pipeline_response.http_response + async def get_next(next_link=None): + _request = prepare_request(next_link) - if response.status_code not in [200]: - if _stream: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs ) - raise HttpResponseError(response=response, model=error) + response = pipeline_response.http_response - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return pipeline_response - return deserialized # type: ignore + return AsyncItemPaged(get_next, extract_data) - @distributed_trace_async - async def get(self, name: str, **kwargs: Any) -> _models.MemoryStoreDetails: - """Retrieve a memory store. - :param name: The name of the memory store to retrieve. Required. +class DatasetsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.aio.AIProjectClient`'s + :attr:`datasets` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_versions(self, name: str, **kwargs: Any) -> AsyncItemPaged["_models.DatasetVersion"]: + """List all versions of the given DatasetVersion. + + :param name: The name of the resource. Required. :type name: str - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails + :return: An iterator like instance of DatasetVersion + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.DatasetVersion] :raises ~azure.core.exceptions.HttpResponseError: """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2171,85 +2213,80 @@ async def get(self, name: str, **kwargs: Any) -> _models.MemoryStoreDetails: } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) - - _request = build_memory_stores_get_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + def prepare_request(next_link=None): + if not next_link: - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) + _request = build_datasets_list_versions_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - response = pipeline_response.http_response + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - if response.status_code not in [200]: - if _stream: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs ) - raise HttpResponseError(response=response, model=error) + response = pipeline_response.http_response - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return pipeline_response - return deserialized # type: ignore + return AsyncItemPaged(get_next, extract_data) @distributed_trace - def list( - self, - *, - limit: Optional[int] = None, - order: Optional[Union[str, _models.PageOrder]] = None, - before: Optional[str] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.MemoryStoreDetails"]: - """List all memory stores. + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.DatasetVersion"]: + """List the latest version of each DatasetVersion. - :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and - 100, and the - default is 20. Default value is None. - :paramtype limit: int - :keyword order: Sort order by the ``created_at`` timestamp of the objects. ``asc`` for - ascending order and``desc`` - for descending order. Known values are: "asc" and "desc". Default value is None. - :paramtype order: str or ~azure.ai.projects.models.PageOrder - :keyword before: A cursor for use in pagination. ``before`` is an object ID that defines your - place in the list. - For instance, if you make a list request and receive 100 objects, ending with obj_foo, your - subsequent call can include before=obj_foo in order to fetch the previous page of the list. - Default value is None. - :paramtype before: str - :return: An iterator like instance of MemoryStoreDetails - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.MemoryStoreDetails] + :return: An iterator like instance of DatasetVersion + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.DatasetVersion] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.MemoryStoreDetails]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -2259,32 +2296,52 @@ def list( } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(_continuation_token=None): + def prepare_request(next_link=None): + if not next_link: + + _request = build_datasets_list_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _request = build_memory_stores_list_request( - limit=limit, - order=order, - after=_continuation_token, - before=before, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) return _request async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.MemoryStoreDetails], deserialized.get("data", [])) + list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("last_id") or None, AsyncList(list_of_elem) + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) - async def get_next(_continuation_token=None): - _request = prepare_request(_continuation_token) + async def get_next(next_link=None): + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access @@ -2294,24 +2351,23 @@ async def get_next(_continuation_token=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) return pipeline_response return AsyncItemPaged(get_next, extract_data) @distributed_trace_async - async def delete(self, name: str, **kwargs: Any) -> _models.DeleteMemoryStoreResult: - """Delete a memory store. + async def get(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVersion: + """Get the specific version of the DatasetVersion. The service returns 404 Not Found error if the + DatasetVersion does not exist. - :param name: The name of the memory store to delete. Required. + :param name: The name of the resource. Required. :type name: str - :return: DeleteMemoryStoreResult. The DeleteMemoryStoreResult is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DeleteMemoryStoreResult + :param version: The specific version id of the DatasetVersion to retrieve. Required. + :type version: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetVersion :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -2325,10 +2381,11 @@ async def delete(self, name: str, **kwargs: Any) -> _models.DeleteMemoryStoreRes _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.DeleteMemoryStoreResult] = kwargs.pop("cls", None) + cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) - _request = build_memory_stores_delete_request( + _request = build_datasets_get_request( name=name, + version=version, api_version=self._config.api_version, headers=_headers, params=_params, @@ -2352,122 +2409,163 @@ async def delete(self, name: str, **kwargs: Any) -> _models.DeleteMemoryStoreRes except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.DeleteMemoryStoreResult, response.json()) + deserialized = _deserialize(_models.DatasetVersion, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + @distributed_trace_async + async def delete(self, name: str, version: str, **kwargs: Any) -> None: + """Delete the specific version of the DatasetVersion. The service returns 204 No Content if the + DatasetVersion was deleted successfully or if the DatasetVersion does not exist. + + :param name: The name of the resource. Required. + :type name: str + :param version: The version of the DatasetVersion to delete. Required. + :type version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_datasets_delete_request( + name=name, + version=version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + @overload - async def search_memories( + async def create_or_update( self, name: str, + version: str, + dataset_version: _models.DatasetVersion, *, - scope: str, - content_type: str = "application/json", - items: Optional[List[_models.InputItem]] = None, - previous_search_id: Optional[str] = None, - options: Optional[_models.MemorySearchOptions] = None, + content_type: str = "application/merge-patch+json", **kwargs: Any - ) -> _models.MemoryStoreSearchResult: - """Search for relevant memories from a memory store based on conversation context. + ) -> _models.DatasetVersion: + """Create a new or update an existing DatasetVersion with the given version id. - :param name: The name of the memory store to search. Required. + :param name: The name of the resource. Required. :type name: str - :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. - Required. - :paramtype scope: str + :param version: The specific version id of the DatasetVersion to create or update. Required. + :type version: str + :param dataset_version: The DatasetVersion to create or update. Required. + :type dataset_version: ~azure.ai.projects.models.DatasetVersion :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str - :keyword items: Items for which to search for relevant memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.InputItem] - :keyword previous_search_id: The unique ID of the previous search request, enabling incremental - memory search from where the last operation left off. Default value is None. - :paramtype previous_search_id: str - :keyword options: Memory search options. Default value is None. - :paramtype options: ~azure.ai.projects.models.MemorySearchOptions - :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetVersion :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def search_memories( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreSearchResult: - """Search for relevant memories from a memory store based on conversation context. + async def create_or_update( + self, + name: str, + version: str, + dataset_version: JSON, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or update an existing DatasetVersion with the given version id. - :param name: The name of the memory store to search. Required. + :param name: The name of the resource. Required. :type name: str - :param body: Required. - :type body: JSON + :param version: The specific version id of the DatasetVersion to create or update. Required. + :type version: str + :param dataset_version: The DatasetVersion to create or update. Required. + :type dataset_version: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str - :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetVersion :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def search_memories( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreSearchResult: - """Search for relevant memories from a memory store based on conversation context. + async def create_or_update( + self, + name: str, + version: str, + dataset_version: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or update an existing DatasetVersion with the given version id. - :param name: The name of the memory store to search. Required. + :param name: The name of the resource. Required. :type name: str - :param body: Required. - :type body: IO[bytes] + :param version: The specific version id of the DatasetVersion to create or update. Required. + :type version: str + :param dataset_version: The DatasetVersion to create or update. Required. + :type dataset_version: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str - :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetVersion :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace_async - async def search_memories( - self, - name: str, - body: Union[JSON, IO[bytes]] = _Unset, - *, - scope: str = _Unset, - items: Optional[List[_models.InputItem]] = None, - previous_search_id: Optional[str] = None, - options: Optional[_models.MemorySearchOptions] = None, - **kwargs: Any - ) -> _models.MemoryStoreSearchResult: - """Search for relevant memories from a memory store based on conversation context. + async def create_or_update( + self, name: str, version: str, dataset_version: Union[_models.DatasetVersion, JSON, IO[bytes]], **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or update an existing DatasetVersion with the given version id. - :param name: The name of the memory store to search. Required. + :param name: The name of the resource. Required. :type name: str - :param body: Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. - Required. - :paramtype scope: str - :keyword items: Items for which to search for relevant memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.InputItem] - :keyword previous_search_id: The unique ID of the previous search request, enabling incremental - memory search from where the last operation left off. Default value is None. - :paramtype previous_search_id: str - :keyword options: Memory search options. Default value is None. - :paramtype options: ~azure.ai.projects.models.MemorySearchOptions - :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult + :param version: The specific version id of the DatasetVersion to create or update. Required. + :type version: str + :param dataset_version: The DatasetVersion to create or update. Is one of the following types: + DatasetVersion, JSON, IO[bytes] Required. + :type dataset_version: ~azure.ai.projects.models.DatasetVersion or JSON or IO[bytes] + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetVersion :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -2482,27 +2580,18 @@ async def search_memories( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.MemoryStoreSearchResult] = kwargs.pop("cls", None) + cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) - if body is _Unset: - if scope is _Unset: - raise TypeError("missing required argument: scope") - body = { - "items": items, - "options": options, - "previous_search_id": previous_search_id, - "scope": scope, - } - body = {k: v for k, v in body.items() if v is not None} - content_type = content_type or "application/json" + content_type = content_type or "application/merge-patch+json" _content = None - if isinstance(body, (IOBase, bytes)): - _content = body + if isinstance(dataset_version, (IOBase, bytes)): + _content = dataset_version else: - _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(dataset_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_memory_stores_search_memories_request( + _request = build_datasets_create_or_update_request( name=name, + version=version, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -2521,40 +2610,125 @@ async def search_memories( response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 201]: if _stream: try: await response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.MemoryStoreSearchResult, response.json()) + deserialized = _deserialize(_models.DatasetVersion, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - async def _update_memories_initial( + @overload + async def pending_upload( self, name: str, - body: Union[JSON, IO[bytes]] = _Unset, + version: str, + pending_upload_request: _models.PendingUploadRequest, *, - scope: str = _Unset, - items: Optional[List[_models.InputItem]] = None, - previous_update_id: Optional[str] = None, - update_delay: Optional[int] = None, + content_type: str = "application/json", **kwargs: Any - ) -> AsyncIterator[bytes]: + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param pending_upload_request: The pending upload request parameters. Required. + :type pending_upload_request: ~azure.ai.projects.models.PendingUploadRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def pending_upload( + self, + name: str, + version: str, + pending_upload_request: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param pending_upload_request: The pending upload request parameters. Required. + :type pending_upload_request: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def pending_upload( + self, + name: str, + version: str, + pending_upload_request: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param pending_upload_request: The pending upload request parameters. Required. + :type pending_upload_request: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def pending_upload( + self, + name: str, + version: str, + pending_upload_request: Union[_models.PendingUploadRequest, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param pending_upload_request: The pending upload request parameters. Is one of the following + types: PendingUploadRequest, JSON, IO[bytes] Required. + :type pending_upload_request: ~azure.ai.projects.models.PendingUploadRequest or JSON or + IO[bytes] + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2567,27 +2741,18 @@ async def _update_memories_initial( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + cls: ClsType[_models.PendingUploadResponse] = kwargs.pop("cls", None) - if body is _Unset: - if scope is _Unset: - raise TypeError("missing required argument: scope") - body = { - "items": items, - "previous_update_id": previous_update_id, - "scope": scope, - "update_delay": update_delay, - } - body = {k: v for k, v in body.items() if v is not None} content_type = content_type or "application/json" _content = None - if isinstance(body, (IOBase, bytes)): - _content = body + if isinstance(pending_upload_request, (IOBase, bytes)): + _content = pending_upload_request else: - _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(pending_upload_request, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_memory_stores_update_memories_request( + _request = build_datasets_pending_upload_request( name=name, + version=version, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -2599,229 +2764,42 @@ async def _update_memories_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = True + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [202]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + raise HttpResponseError(response=response) - deserialized = response.iter_bytes() + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.PendingUploadResponse, response.json()) if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - @overload - async def _begin_update_memories( - self, - name: str, - *, - scope: str, - content_type: str = "application/json", - items: Optional[List[_models.InputItem]] = None, - previous_update_id: Optional[str] = None, - update_delay: Optional[int] = None, - **kwargs: Any - ) -> AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]: ... - @overload - async def _begin_update_memories( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]: ... - @overload - async def _begin_update_memories( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]: ... - - @distributed_trace_async - async def _begin_update_memories( - self, - name: str, - body: Union[JSON, IO[bytes]] = _Unset, - *, - scope: str = _Unset, - items: Optional[List[_models.InputItem]] = None, - previous_update_id: Optional[str] = None, - update_delay: Optional[int] = None, - **kwargs: Any - ) -> AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]: - """Update memory store with conversation memories. - - :param name: The name of the memory store to update. Required. - :type name: str - :param body: Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. - Required. - :paramtype scope: str - :keyword items: Conversation items from which to extract memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.InputItem] - :keyword previous_update_id: The unique ID of the previous update request, enabling incremental - memory updates from where the last operation left off. Default value is None. - :paramtype previous_update_id: str - :keyword update_delay: Timeout period before processing the memory update in seconds. - If a new update request is received during this period, it will cancel the current request and - reset the timeout. - Set to 0 to immediately trigger the update without delay. - Defaults to 300 (5 minutes). Default value is None. - :paramtype update_delay: int - :return: An instance of AsyncLROPoller that returns MemoryStoreUpdateCompletedResult. The - MemoryStoreUpdateCompletedResult is compatible with MutableMapping - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.ai.projects.models.MemoryStoreUpdateCompletedResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.MemoryStoreUpdateCompletedResult] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._update_memories_initial( - name=name, - body=body, - scope=scope, - items=items, - previous_update_id=previous_update_id, - update_delay=update_delay, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.MemoryStoreUpdateCompletedResult, response.json().get("result", {})) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, - AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @overload - async def delete_scope( - self, name: str, *, scope: str, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDeleteScopeResult: - """Delete all memories associated with a specific scope from a memory store. - - :param name: The name of the memory store. Required. - :type name: str - :keyword scope: The namespace that logically groups and isolates memories to delete, such as a - user ID. Required. - :paramtype scope: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with - MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def delete_scope( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDeleteScopeResult: - """Delete all memories associated with a specific scope from a memory store. - - :param name: The name of the memory store. Required. - :type name: str - :param body: Required. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with - MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def delete_scope( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDeleteScopeResult: - """Delete all memories associated with a specific scope from a memory store. - - :param name: The name of the memory store. Required. - :type name: str - :param body: Required. - :type body: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with - MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - @distributed_trace_async - async def delete_scope( - self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, **kwargs: Any - ) -> _models.MemoryStoreDeleteScopeResult: - """Delete all memories associated with a specific scope from a memory store. + async def get_credentials(self, name: str, version: str, **kwargs: Any) -> _models.DatasetCredential: + """Get the SAS credential to access the storage account associated with a Dataset version. - :param name: The name of the memory store. Required. + :param name: The name of the resource. Required. :type name: str - :param body: Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword scope: The namespace that logically groups and isolates memories to delete, such as a - user ID. Required. - :paramtype scope: str - :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with - MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :return: DatasetCredential. The DatasetCredential is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetCredential :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -2832,29 +2810,15 @@ async def delete_scope( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.MemoryStoreDeleteScopeResult] = kwargs.pop("cls", None) - - if body is _Unset: - if scope is _Unset: - raise TypeError("missing required argument: scope") - body = {"scope": scope} - body = {k: v for k, v in body.items() if v is not None} - content_type = content_type or "application/json" - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + cls: ClsType[_models.DatasetCredential] = kwargs.pop("cls", None) - _request = build_memory_stores_delete_scope_request( + _request = build_datasets_get_credentials_request( name=name, - content_type=content_type, + version=version, api_version=self._config.api_version, - content=_content, headers=_headers, params=_params, ) @@ -2877,16 +2841,12 @@ async def delete_scope( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.MemoryStoreDeleteScopeResult, response.json()) + deserialized = _deserialize(_models.DatasetCredential, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2894,14 +2854,14 @@ async def delete_scope( return deserialized # type: ignore -class ConnectionsOperations: +class DeploymentsOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.ai.projects.aio.AIProjectClient`'s - :attr:`connections` attribute. + :attr:`deployments` attribute. """ def __init__(self, *args, **kwargs) -> None: @@ -2912,78 +2872,13 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def _get(self, name: str, **kwargs: Any) -> _models.Connection: - """Get a connection by name, without populating connection credentials. - - :param name: The friendly name of the connection, provided by the user. Required. - :type name: str - :return: Connection. The Connection is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Connection - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[_models.Connection] = kwargs.pop("cls", None) - - _request = build_connections_get_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - if _stream: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.Connection, response.json()) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: - """Get a connection by name, with its connection credentials. + async def get(self, name: str, **kwargs: Any) -> _models.Deployment: + """Get a deployed model. - :param name: The friendly name of the connection, provided by the user. Required. + :param name: Name of the deployment. Required. :type name: str - :return: Connection. The Connection is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Connection + :return: Deployment. The Deployment is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Deployment :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -2997,9 +2892,9 @@ async def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Conne _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.Connection] = kwargs.pop("cls", None) + cls: ClsType[_models.Deployment] = kwargs.pop("cls", None) - _request = build_connections_get_with_credentials_request( + _request = build_deployments_get_request( name=name, api_version=self._config.api_version, headers=_headers, @@ -3034,7 +2929,7 @@ async def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Conne if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Connection, response.json()) + deserialized = _deserialize(_models.Deployment, response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3045,27 +2940,29 @@ async def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Conne def list( self, *, - connection_type: Optional[Union[str, _models.ConnectionType]] = None, - default_connection: Optional[bool] = None, + model_publisher: Optional[str] = None, + model_name: Optional[str] = None, + deployment_type: Optional[Union[str, _models.DeploymentType]] = None, **kwargs: Any - ) -> AsyncItemPaged["_models.Connection"]: - """List all connections in the project, without populating connection credentials. + ) -> AsyncItemPaged["_models.Deployment"]: + """List all deployed models in the project. - :keyword connection_type: List connections of this specific type. Known values are: - "AzureOpenAI", "AzureBlob", "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", - "AppConfig", "AppInsights", "CustomKeys", and "RemoteTool". Default value is None. - :paramtype connection_type: str or ~azure.ai.projects.models.ConnectionType - :keyword default_connection: List connections that are default connections. Default value is - None. - :paramtype default_connection: bool - :return: An iterator like instance of Connection - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.Connection] + :keyword model_publisher: Model publisher to filter models by. Default value is None. + :paramtype model_publisher: str + :keyword model_name: Model name (the publisher specific name) to filter models by. Default + value is None. + :paramtype model_name: str + :keyword deployment_type: Type of deployment to filter list by. "ModelDeployment" Default value + is None. + :paramtype deployment_type: str or ~azure.ai.projects.models.DeploymentType + :return: An iterator like instance of Deployment + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.Deployment] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.Connection]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.Deployment]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -3078,9 +2975,10 @@ def list( def prepare_request(next_link=None): if not next_link: - _request = build_connections_list_request( - connection_type=connection_type, - default_connection=default_connection, + _request = build_deployments_list_request( + model_publisher=model_publisher, + model_name=model_name, + deployment_type=deployment_type, api_version=self._config.api_version, headers=_headers, params=_params, @@ -3116,7 +3014,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Connection], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.Deployment], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -3139,14 +3037,14 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) -class DatasetsOperations: +class EvaluationTaxonomiesOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.ai.projects.aio.AIProjectClient`'s - :attr:`datasets` attribute. + :attr:`evaluation_taxonomies` attribute. """ def __init__(self, *args, **kwargs) -> None: @@ -3156,21 +3054,16 @@ def __init__(self, *args, **kwargs) -> None: self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def list_versions(self, name: str, **kwargs: Any) -> AsyncItemPaged["_models.DatasetVersion"]: - """List all versions of the given DatasetVersion. + @distributed_trace_async + async def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: + """Get an evaluation run by name. :param name: The name of the resource. Required. :type name: str - :return: An iterator like instance of DatasetVersion - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.DatasetVersion] + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) - error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3179,80 +3072,71 @@ def list_versions(self, name: str, **kwargs: Any) -> AsyncItemPaged["_models.Dat } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - _request = build_datasets_list_versions_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + _request = build_evaluation_taxonomies_get_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - return _request + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - async def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + response = pipeline_response.http_response - async def get_next(next_link=None): - _request = prepare_request(next_link) + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) - return pipeline_response + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return AsyncItemPaged(get_next, extract_data) + return deserialized # type: ignore @distributed_trace - def list(self, **kwargs: Any) -> AsyncItemPaged["_models.DatasetVersion"]: - """List the latest version of each DatasetVersion. + def list( + self, *, input_name: Optional[str] = None, input_type: Optional[str] = None, **kwargs: Any + ) -> AsyncItemPaged["_models.EvaluationTaxonomy"]: + """List evaluation taxonomies. - :return: An iterator like instance of DatasetVersion - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.DatasetVersion] + :keyword input_name: Filter by the evaluation input name. Default value is None. + :paramtype input_name: str + :keyword input_type: Filter by taxonomy input type. Default value is None. + :paramtype input_type: str + :return: An iterator like instance of EvaluationTaxonomy + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.EvaluationTaxonomy] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.EvaluationTaxonomy]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -3265,7 +3149,9 @@ def list(self, **kwargs: Any) -> AsyncItemPaged["_models.DatasetVersion"]: def prepare_request(next_link=None): if not next_link: - _request = build_datasets_list_request( + _request = build_evaluation_taxonomies_list_request( + input_name=input_name, + input_type=input_type, api_version=self._config.api_version, headers=_headers, params=_params, @@ -3301,7 +3187,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.EvaluationTaxonomy], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -3324,16 +3210,325 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace_async - async def get(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVersion: - """Get the specific version of the DatasetVersion. The service returns 404 Not Found error if the - DatasetVersion does not exist. + async def delete(self, name: str, **kwargs: Any) -> None: + """Delete an evaluation taxonomy by name. :param name: The name of the resource. Required. :type name: str - :param version: The specific version id of the DatasetVersion to retrieve. Required. - :type version: str - :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetVersion + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_evaluation_taxonomies_delete_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @overload + async def create( + self, + name: str, + body: _models.EvaluationTaxonomy, + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluationTaxonomy: + """Create an evaluation taxonomy. + + :param name: The name of the evaluation taxonomy. Required. + :type name: str + :param body: The evaluation taxonomy. Required. + :type body: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + name: str, + body: JSON, + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluationTaxonomy: + """Create an evaluation taxonomy. + + :param name: The name of the evaluation taxonomy. Required. + :type name: str + :param body: The evaluation taxonomy. Required. + :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + name: str, + body: IO[bytes], + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluationTaxonomy: + """Create an evaluation taxonomy. + + :param name: The name of the evaluation taxonomy. Required. + :type name: str + :param body: The evaluation taxonomy. Required. + :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create( + self, + name: str, + body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + **kwargs: Any + ) -> _models.EvaluationTaxonomy: + """Create an evaluation taxonomy. + + :param name: The name of the evaluation taxonomy. Required. + :type name: str + :param body: The evaluation taxonomy. Is one of the following types: EvaluationTaxonomy, JSON, + IO[bytes] Required. + :type body: ~azure.ai.projects.models.EvaluationTaxonomy or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_evaluation_taxonomies_create_request( + name=name, + foundry_features=foundry_features, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def update( + self, + name: str, + body: _models.EvaluationTaxonomy, + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluationTaxonomy: + """Update an evaluation taxonomy. + + :param name: The name of the evaluation taxonomy. Required. + :type name: str + :param body: The evaluation taxonomy. Required. + :type body: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + name: str, + body: JSON, + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluationTaxonomy: + """Update an evaluation taxonomy. + + :param name: The name of the evaluation taxonomy. Required. + :type name: str + :param body: The evaluation taxonomy. Required. + :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + name: str, + body: IO[bytes], + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluationTaxonomy: + """Update an evaluation taxonomy. + + :param name: The name of the evaluation taxonomy. Required. + :type name: str + :param body: The evaluation taxonomy. Required. + :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, + name: str, + body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + **kwargs: Any + ) -> _models.EvaluationTaxonomy: + """Update an evaluation taxonomy. + + :param name: The name of the evaluation taxonomy. Required. + :type name: str + :param body: The evaluation taxonomy. Is one of the following types: EvaluationTaxonomy, JSON, + IO[bytes] Required. + :type body: ~azure.ai.projects.models.EvaluationTaxonomy or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -3344,15 +3539,25 @@ async def get(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVe } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) - _request = build_datasets_get_request( + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_evaluation_taxonomies_update_request( name=name, - version=version, + foundry_features=foundry_features, + content_type=content_type, api_version=self._config.api_version, + content=_content, headers=_headers, params=_params, ) @@ -3380,24 +3585,39 @@ async def get(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVe if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.DatasetVersion, response.json()) + deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + +class EvaluationRulesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.aio.AIProjectClient`'s + :attr:`evaluation_rules` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace_async - async def delete(self, name: str, version: str, **kwargs: Any) -> None: - """Delete the specific version of the DatasetVersion. The service returns 204 No Content if the - DatasetVersion was deleted successfully or if the DatasetVersion does not exist. + async def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: + """Get an evaluation rule. - :param name: The name of the resource. Required. - :type name: str - :param version: The version of the DatasetVersion to delete. Required. - :type version: str - :return: None - :rtype: None + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationRule :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -3411,11 +3631,10 @@ async def delete(self, name: str, version: str, **kwargs: Any) -> None: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[_models.EvaluationRule] = kwargs.pop("cls", None) - _request = build_datasets_delete_request( - name=name, - version=version, + _request = build_evaluation_rules_get_request( + id=id, api_version=self._config.api_version, headers=_headers, params=_params, @@ -3425,113 +3644,45 @@ async def delete(self, name: str, version: str, **kwargs: Any) -> None: } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [204]: + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if cls: - return cls(pipeline_response, None, {}) # type: ignore - - @overload - async def create_or_update( - self, - name: str, - version: str, - dataset_version: _models.DatasetVersion, - *, - content_type: str = "application/merge-patch+json", - **kwargs: Any - ) -> _models.DatasetVersion: - """Create a new or update an existing DatasetVersion with the given version id. - - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to create or update. Required. - :type version: str - :param dataset_version: The DatasetVersion to create or update. Required. - :type dataset_version: ~azure.ai.projects.models.DatasetVersion - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/merge-patch+json". - :paramtype content_type: str - :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetVersion - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create_or_update( - self, - name: str, - version: str, - dataset_version: JSON, - *, - content_type: str = "application/merge-patch+json", - **kwargs: Any - ) -> _models.DatasetVersion: - """Create a new or update an existing DatasetVersion with the given version id. + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to create or update. Required. - :type version: str - :param dataset_version: The DatasetVersion to create or update. Required. - :type dataset_version: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/merge-patch+json". - :paramtype content_type: str - :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetVersion - :raises ~azure.core.exceptions.HttpResponseError: - """ + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.EvaluationRule, response.json()) - @overload - async def create_or_update( - self, - name: str, - version: str, - dataset_version: IO[bytes], - *, - content_type: str = "application/merge-patch+json", - **kwargs: Any - ) -> _models.DatasetVersion: - """Create a new or update an existing DatasetVersion with the given version id. + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to create or update. Required. - :type version: str - :param dataset_version: The DatasetVersion to create or update. Required. - :type dataset_version: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/merge-patch+json". - :paramtype content_type: str - :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetVersion - :raises ~azure.core.exceptions.HttpResponseError: - """ + return deserialized # type: ignore @distributed_trace_async - async def create_or_update( - self, name: str, version: str, dataset_version: Union[_models.DatasetVersion, JSON, IO[bytes]], **kwargs: Any - ) -> _models.DatasetVersion: - """Create a new or update an existing DatasetVersion with the given version id. + async def delete(self, id: str, **kwargs: Any) -> None: + """Delete an evaluation rule. - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to create or update. Required. - :type version: str - :param dataset_version: The DatasetVersion to create or update. Is one of the following types: - DatasetVersion, JSON, IO[bytes] Required. - :type dataset_version: ~azure.ai.projects.models.DatasetVersion or JSON or IO[bytes] - :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetVersion + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :return: None + :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -3542,25 +3693,14 @@ async def create_or_update( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) - - content_type = content_type or "application/merge-patch+json" - _content = None - if isinstance(dataset_version, (IOBase, bytes)): - _content = dataset_version - else: - _content = json.dumps(dataset_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_datasets_create_or_update_request( - name=name, - version=version, - content_type=content_type, + _request = build_evaluation_rules_delete_request( + id=id, api_version=self._config.api_version, - content=_content, headers=_headers, params=_params, ) @@ -3569,130 +3709,92 @@ async def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = kwargs.pop("stream", False) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [200, 201]: - if _stream: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass + if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.DatasetVersion, response.json()) + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore @overload - async def pending_upload( - self, - name: str, - version: str, - pending_upload_request: _models.PendingUploadRequest, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.PendingUploadResponse: - """Start a new or get an existing pending upload of a dataset for a specific version. + async def create_or_update( + self, id: str, evaluation_rule: _models.EvaluationRule, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.EvaluationRule: + """Create or update an evaluation rule. - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to operate on. Required. - :type version: str - :param pending_upload_request: The pending upload request parameters. Required. - :type pending_upload_request: ~azure.ai.projects.models.PendingUploadRequest + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :param evaluation_rule: Evaluation rule resource. Required. + :type evaluation_rule: ~azure.ai.projects.models.EvaluationRule :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.PendingUploadResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def pending_upload( - self, - name: str, - version: str, - pending_upload_request: JSON, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.PendingUploadResponse: - """Start a new or get an existing pending upload of a dataset for a specific version. + :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationRule + :raises ~azure.core.exceptions.HttpResponseError: + """ - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to operate on. Required. - :type version: str - :param pending_upload_request: The pending upload request parameters. Required. - :type pending_upload_request: JSON + @overload + async def create_or_update( + self, id: str, evaluation_rule: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.EvaluationRule: + """Create or update an evaluation rule. + + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :param evaluation_rule: Evaluation rule resource. Required. + :type evaluation_rule: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.PendingUploadResponse + :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationRule :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def pending_upload( - self, - name: str, - version: str, - pending_upload_request: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.PendingUploadResponse: - """Start a new or get an existing pending upload of a dataset for a specific version. + async def create_or_update( + self, id: str, evaluation_rule: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.EvaluationRule: + """Create or update an evaluation rule. - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to operate on. Required. - :type version: str - :param pending_upload_request: The pending upload request parameters. Required. - :type pending_upload_request: IO[bytes] + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :param evaluation_rule: Evaluation rule resource. Required. + :type evaluation_rule: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.PendingUploadResponse + :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationRule :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace_async - async def pending_upload( - self, - name: str, - version: str, - pending_upload_request: Union[_models.PendingUploadRequest, JSON, IO[bytes]], - **kwargs: Any - ) -> _models.PendingUploadResponse: - """Start a new or get an existing pending upload of a dataset for a specific version. + async def create_or_update( + self, id: str, evaluation_rule: Union[_models.EvaluationRule, JSON, IO[bytes]], **kwargs: Any + ) -> _models.EvaluationRule: + """Create or update an evaluation rule. - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to operate on. Required. - :type version: str - :param pending_upload_request: The pending upload request parameters. Is one of the following - types: PendingUploadRequest, JSON, IO[bytes] Required. - :type pending_upload_request: ~azure.ai.projects.models.PendingUploadRequest or JSON or - IO[bytes] - :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.PendingUploadResponse + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :param evaluation_rule: Evaluation rule resource. Is one of the following types: + EvaluationRule, JSON, IO[bytes] Required. + :type evaluation_rule: ~azure.ai.projects.models.EvaluationRule or JSON or IO[bytes] + :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationRule :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -3707,18 +3809,17 @@ async def pending_upload( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.PendingUploadResponse] = kwargs.pop("cls", None) + cls: ClsType[_models.EvaluationRule] = kwargs.pop("cls", None) content_type = content_type or "application/json" _content = None - if isinstance(pending_upload_request, (IOBase, bytes)): - _content = pending_upload_request + if isinstance(evaluation_rule, (IOBase, bytes)): + _content = evaluation_rule else: - _content = json.dumps(pending_upload_request, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(evaluation_rule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_datasets_pending_upload_request( - name=name, - version=version, + _request = build_evaluation_rules_create_or_update_request( + id=id, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -3737,7 +3838,7 @@ async def pending_upload( response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 201]: if _stream: try: await response.read() # Load the body in memory and close the socket @@ -3749,25 +3850,40 @@ async def pending_upload( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.PendingUploadResponse, response.json()) + deserialized = _deserialize(_models.EvaluationRule, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - @distributed_trace_async - async def get_credentials(self, name: str, version: str, **kwargs: Any) -> _models.DatasetCredential: - """Get the SAS credential to access the storage account associated with a Dataset version. + @distributed_trace + def list( + self, + *, + action_type: Optional[Union[str, _models.EvaluationRuleActionType]] = None, + agent_name: Optional[str] = None, + enabled: Optional[bool] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.EvaluationRule"]: + """List all evaluation rules. - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to operate on. Required. - :type version: str - :return: DatasetCredential. The DatasetCredential is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetCredential + :keyword action_type: Filter by the type of evaluation rule. Known values are: + "continuousEvaluation" and "humanEvaluation". Default value is None. + :paramtype action_type: str or ~azure.ai.projects.models.EvaluationRuleActionType + :keyword agent_name: Filter by the agent name. Default value is None. + :paramtype agent_name: str + :keyword enabled: Filter by the enabled status. Default value is None. + :paramtype enabled: bool + :return: An iterator like instance of EvaluationRule + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.EvaluationRule] :raises ~azure.core.exceptions.HttpResponseError: """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.EvaluationRule]] = kwargs.pop("cls", None) + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3776,58 +3892,79 @@ async def get_credentials(self, name: str, version: str, **kwargs: Any) -> _mode } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} + def prepare_request(next_link=None): + if not next_link: - cls: ClsType[_models.DatasetCredential] = kwargs.pop("cls", None) + _request = build_evaluation_rules_list_request( + action_type=action_type, + agent_name=agent_name, + enabled=enabled, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _request = build_datasets_get_credentials_request( - name=name, - version=version, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) + return _request - response = pipeline_response.http_response + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.EvaluationRule], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) - if response.status_code not in [200]: - if _stream: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + async def get_next(next_link=None): + _request = prepare_request(next_link) - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.DatasetCredential, response.json()) + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) - return deserialized # type: ignore + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) -class IndexesOperations: +class EvaluatorsOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.ai.projects.aio.AIProjectClient`'s - :attr:`indexes` attribute. + :attr:`evaluators` attribute. """ def __init__(self, *args, **kwargs) -> None: @@ -3838,19 +3975,37 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list_versions(self, name: str, **kwargs: Any) -> AsyncItemPaged["_models.Index"]: - """List all versions of the given Index. + def list_versions( + self, + name: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, + limit: Optional[int] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.EvaluatorVersion"]: + """List all versions of the given evaluator. :param name: The name of the resource. Required. :type name: str - :return: An iterator like instance of Index - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.Index] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword type: Filter evaluators by type. Possible values: 'all', 'custom', 'builtin'. Is one + of the following types: Literal["builtin"], Literal["custom"], Literal["all"], str Default + value is None. + :paramtype type: str or str or str or str + :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. Default value is None. + :paramtype limit: int + :return: An iterator like instance of EvaluatorVersion + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.EvaluatorVersion] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.EvaluatorVersion]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -3863,8 +4018,11 @@ def list_versions(self, name: str, **kwargs: Any) -> AsyncItemPaged["_models.Ind def prepare_request(next_link=None): if not next_link: - _request = build_indexes_list_versions_request( + _request = build_evaluators_list_versions_request( name=name, + foundry_features=foundry_features, + type=type, + limit=limit, api_version=self._config.api_version, headers=_headers, params=_params, @@ -3900,7 +4058,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Index], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.EvaluatorVersion], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -3922,18 +4080,35 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - @distributed_trace - def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Index"]: - """List the latest version of each Index. + @distributed_trace + def list_latest_versions( + self, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, + limit: Optional[int] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.EvaluatorVersion"]: + """List the latest version of each evaluator. - :return: An iterator like instance of Index - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.Index] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword type: Filter evaluators by type. Possible values: 'all', 'custom', 'builtin'. Is one + of the following types: Literal["builtin"], Literal["custom"], Literal["all"], str Default + value is None. + :paramtype type: str or str or str or str + :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. Default value is None. + :paramtype limit: int + :return: An iterator like instance of EvaluatorVersion + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.EvaluatorVersion] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.EvaluatorVersion]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -3946,7 +4121,10 @@ def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Index"]: def prepare_request(next_link=None): if not next_link: - _request = build_indexes_list_request( + _request = build_evaluators_list_latest_versions_request( + foundry_features=foundry_features, + type=type, + limit=limit, api_version=self._config.api_version, headers=_headers, params=_params, @@ -3982,7 +4160,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Index], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.EvaluatorVersion], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -4005,16 +4183,26 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace_async - async def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: - """Get the specific version of the Index. The service returns 404 Not Found error if the Index - does not exist. + async def get_version( + self, + name: str, + version: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Get the specific version of the EvaluatorVersion. The service returns 404 Not Found error if + the EvaluatorVersion does not exist. :param name: The name of the resource. Required. :type name: str - :param version: The specific version id of the Index to retrieve. Required. + :param version: The specific version id of the EvaluatorVersion to retrieve. Required. :type version: str - :return: Index. The Index is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Index + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4028,11 +4216,12 @@ async def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.Index] = kwargs.pop("cls", None) + cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) - _request = build_indexes_get_request( + _request = build_evaluators_get_version_request( name=name, version=version, + foundry_features=foundry_features, api_version=self._config.api_version, headers=_headers, params=_params, @@ -4061,7 +4250,7 @@ async def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Index, response.json()) + deserialized = _deserialize(_models.EvaluatorVersion, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -4069,14 +4258,24 @@ async def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: return deserialized # type: ignore @distributed_trace_async - async def delete(self, name: str, version: str, **kwargs: Any) -> None: - """Delete the specific version of the Index. The service returns 204 No Content if the Index was - deleted successfully or if the Index does not exist. + async def delete_version( + self, + name: str, + version: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any + ) -> None: + """Delete the specific version of the EvaluatorVersion. The service returns 204 No Content if the + EvaluatorVersion was deleted successfully or if the EvaluatorVersion does not exist. :param name: The name of the resource. Required. :type name: str - :param version: The version of the Index to delete. Required. + :param version: The version of the EvaluatorVersion to delete. Required. :type version: str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -4094,9 +4293,10 @@ async def delete(self, name: str, version: str, **kwargs: Any) -> None: cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_indexes_delete_request( + _request = build_evaluators_delete_version_request( name=name, version=version, + foundry_features=foundry_features, api_version=self._config.api_version, headers=_headers, params=_params, @@ -4121,92 +4321,107 @@ async def delete(self, name: str, version: str, **kwargs: Any) -> None: return cls(pipeline_response, None, {}) # type: ignore @overload - async def create_or_update( + async def create_version( self, name: str, - version: str, - index: _models.Index, + evaluator_version: _models.EvaluatorVersion, *, - content_type: str = "application/merge-patch+json", + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", **kwargs: Any - ) -> _models.Index: - """Create a new or update an existing Index with the given version id. + ) -> _models.EvaluatorVersion: + """Create a new EvaluatorVersion with auto incremented version id. :param name: The name of the resource. Required. :type name: str - :param version: The specific version id of the Index to create or update. Required. - :type version: str - :param index: The Index to create or update. Required. - :type index: ~azure.ai.projects.models.Index + :param evaluator_version: Required. + :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/merge-patch+json". + Default value is "application/json". :paramtype content_type: str - :return: Index. The Index is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Index + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def create_or_update( - self, name: str, version: str, index: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any - ) -> _models.Index: - """Create a new or update an existing Index with the given version id. + async def create_version( + self, + name: str, + evaluator_version: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Create a new EvaluatorVersion with auto incremented version id. :param name: The name of the resource. Required. :type name: str - :param version: The specific version id of the Index to create or update. Required. - :type version: str - :param index: The Index to create or update. Required. - :type index: JSON + :param evaluator_version: Required. + :type evaluator_version: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/merge-patch+json". + Default value is "application/json". :paramtype content_type: str - :return: Index. The Index is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Index + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def create_or_update( + async def create_version( self, name: str, - version: str, - index: IO[bytes], + evaluator_version: IO[bytes], *, - content_type: str = "application/merge-patch+json", + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", **kwargs: Any - ) -> _models.Index: - """Create a new or update an existing Index with the given version id. + ) -> _models.EvaluatorVersion: + """Create a new EvaluatorVersion with auto incremented version id. :param name: The name of the resource. Required. :type name: str - :param version: The specific version id of the Index to create or update. Required. - :type version: str - :param index: The Index to create or update. Required. - :type index: IO[bytes] + :param evaluator_version: Required. + :type evaluator_version: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/merge-patch+json". + Default value is "application/json". :paramtype content_type: str - :return: Index. The Index is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Index + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace_async - async def create_or_update( - self, name: str, version: str, index: Union[_models.Index, JSON, IO[bytes]], **kwargs: Any - ) -> _models.Index: - """Create a new or update an existing Index with the given version id. + async def create_version( + self, + name: str, + evaluator_version: Union[_models.EvaluatorVersion, JSON, IO[bytes]], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Create a new EvaluatorVersion with auto incremented version id. :param name: The name of the resource. Required. :type name: str - :param version: The specific version id of the Index to create or update. Required. - :type version: str - :param index: The Index to create or update. Is one of the following types: Index, JSON, - IO[bytes] Required. - :type index: ~azure.ai.projects.models.Index or JSON or IO[bytes] - :return: Index. The Index is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Index + :param evaluator_version: Is one of the following types: EvaluatorVersion, JSON, IO[bytes] + Required. + :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4221,18 +4436,18 @@ async def create_or_update( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.Index] = kwargs.pop("cls", None) + cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) - content_type = content_type or "application/merge-patch+json" + content_type = content_type or "application/json" _content = None - if isinstance(index, (IOBase, bytes)): - _content = index + if isinstance(evaluator_version, (IOBase, bytes)): + _content = evaluator_version else: - _content = json.dumps(index, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(evaluator_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_indexes_create_or_update_request( + _request = build_evaluators_create_version_request( name=name, - version=version, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -4251,7 +4466,7 @@ async def create_or_update( response = pipeline_response.http_response - if response.status_code not in [200, 201]: + if response.status_code not in [201]: if _stream: try: await response.read() # Load the body in memory and close the socket @@ -4263,39 +4478,127 @@ async def create_or_update( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Index, response.json()) + deserialized = _deserialize(_models.EvaluatorVersion, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + @overload + async def update_version( + self, + name: str, + version: str, + evaluator_version: _models.EvaluatorVersion, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Update an existing EvaluatorVersion with the given version id. -class DeploymentsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. + :param name: The name of the resource. Required. + :type name: str + :param version: The version of the EvaluatorVersion to update. Required. + :type version: str + :param evaluator_version: Evaluator resource. Required. + :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ - Instead, you should access the following operations through - :class:`~azure.ai.projects.aio.AIProjectClient`'s - :attr:`deployments` attribute. - """ + @overload + async def update_version( + self, + name: str, + version: str, + evaluator_version: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Update an existing EvaluatorVersion with the given version id. - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + :param name: The name of the resource. Required. + :type name: str + :param version: The version of the EvaluatorVersion to update. Required. + :type version: str + :param evaluator_version: Evaluator resource. Required. + :type evaluator_version: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update_version( + self, + name: str, + version: str, + evaluator_version: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Update an existing EvaluatorVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The version of the EvaluatorVersion to update. Required. + :type version: str + :param evaluator_version: Evaluator resource. Required. + :type evaluator_version: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async - async def get(self, name: str, **kwargs: Any) -> _models.Deployment: - """Get a deployed model. + async def update_version( + self, + name: str, + version: str, + evaluator_version: Union[_models.EvaluatorVersion, JSON, IO[bytes]], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Update an existing EvaluatorVersion with the given version id. - :param name: Name of the deployment. Required. + :param name: The name of the resource. Required. :type name: str - :return: Deployment. The Deployment is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Deployment + :param version: The version of the EvaluatorVersion to update. Required. + :type version: str + :param evaluator_version: Evaluator resource. Is one of the following types: EvaluatorVersion, + JSON, IO[bytes] Required. + :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4306,14 +4609,26 @@ async def get(self, name: str, **kwargs: Any) -> _models.Deployment: } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.Deployment] = kwargs.pop("cls", None) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) - _request = build_deployments_get_request( + content_type = content_type or "application/json" + _content = None + if isinstance(evaluator_version, (IOBase, bytes)): + _content = evaluator_version + else: + _content = json.dumps(evaluator_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_evaluators_update_version_request( name=name, + version=version, + foundry_features=foundry_features, + content_type=content_type, api_version=self._config.api_version, + content=_content, headers=_headers, params=_params, ) @@ -4338,48 +4653,48 @@ async def get(self, name: str, **kwargs: Any) -> _models.Deployment: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Deployment, response.json()) + deserialized = _deserialize(_models.EvaluatorVersion, response.json()) if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + +class IndexesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.aio.AIProjectClient`'s + :attr:`indexes` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace - def list( - self, - *, - model_publisher: Optional[str] = None, - model_name: Optional[str] = None, - deployment_type: Optional[Union[str, _models.DeploymentType]] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.Deployment"]: - """List all deployed models in the project. + def list_versions(self, name: str, **kwargs: Any) -> AsyncItemPaged["_models.Index"]: + """List all versions of the given Index. - :keyword model_publisher: Model publisher to filter models by. Default value is None. - :paramtype model_publisher: str - :keyword model_name: Model name (the publisher specific name) to filter models by. Default - value is None. - :paramtype model_name: str - :keyword deployment_type: Type of deployment to filter list by. "ModelDeployment" Default value - is None. - :paramtype deployment_type: str or ~azure.ai.projects.models.DeploymentType - :return: An iterator like instance of Deployment - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.Deployment] + :param name: The name of the resource. Required. + :type name: str + :return: An iterator like instance of Index + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.Index] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.Deployment]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -4392,10 +4707,8 @@ def list( def prepare_request(next_link=None): if not next_link: - _request = build_deployments_list_request( - model_publisher=model_publisher, - model_name=model_name, - deployment_type=deployment_type, + _request = build_indexes_list_versions_request( + name=name, api_version=self._config.api_version, headers=_headers, params=_params, @@ -4431,7 +4744,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Deployment], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.Index], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -4453,101 +4766,18 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - -class RedTeamsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.aio.AIProjectClient`'s - :attr:`red_teams` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def get(self, name: str, **kwargs: Any) -> _models.RedTeam: - """Get a redteam by name. - - :param name: Identifier of the red team run. Required. - :type name: str - :return: RedTeam. The RedTeam is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.RedTeam - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) - - _request = build_red_teams_get_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - if _stream: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.RedTeam, response.json()) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - @distributed_trace - def list(self, **kwargs: Any) -> AsyncItemPaged["_models.RedTeam"]: - """List a redteam by name. + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Index"]: + """List the latest version of each Index. - :return: An iterator like instance of RedTeam - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.RedTeam] + :return: An iterator like instance of Index + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.Index] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.RedTeam]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -4560,7 +4790,7 @@ def list(self, **kwargs: Any) -> AsyncItemPaged["_models.RedTeam"]: def prepare_request(next_link=None): if not next_link: - _request = build_red_teams_list_request( + _request = build_indexes_list_request( api_version=self._config.api_version, headers=_headers, params=_params, @@ -4596,7 +4826,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.RedTeam], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.Index], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -4618,61 +4848,17 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - @overload - async def create( - self, red_team: _models.RedTeam, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.RedTeam: - """Creates a redteam run. - - :param red_team: Redteam to be run. Required. - :type red_team: ~azure.ai.projects.models.RedTeam - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: RedTeam. The RedTeam is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.RedTeam - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create(self, red_team: JSON, *, content_type: str = "application/json", **kwargs: Any) -> _models.RedTeam: - """Creates a redteam run. - - :param red_team: Redteam to be run. Required. - :type red_team: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: RedTeam. The RedTeam is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.RedTeam - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def create( - self, red_team: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.RedTeam: - """Creates a redteam run. - - :param red_team: Redteam to be run. Required. - :type red_team: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: RedTeam. The RedTeam is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.RedTeam - :raises ~azure.core.exceptions.HttpResponseError: - """ - @distributed_trace_async - async def create(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwargs: Any) -> _models.RedTeam: - """Creates a redteam run. + async def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: + """Get the specific version of the Index. The service returns 404 Not Found error if the Index + does not exist. - :param red_team: Redteam to be run. Is one of the following types: RedTeam, JSON, IO[bytes] - Required. - :type red_team: ~azure.ai.projects.models.RedTeam or JSON or IO[bytes] - :return: RedTeam. The RedTeam is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.RedTeam + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to retrieve. Required. + :type version: str + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Index :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4683,23 +4869,15 @@ async def create(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwar } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _content = None - if isinstance(red_team, (IOBase, bytes)): - _content = red_team - else: - _content = json.dumps(red_team, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + cls: ClsType[_models.Index] = kwargs.pop("cls", None) - _request = build_red_teams_create_request( - content_type=content_type, + _request = build_indexes_get_request( + name=name, + version=version, api_version=self._config.api_version, - content=_content, headers=_headers, params=_params, ) @@ -4715,7 +4893,7 @@ async def create(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwar response = pipeline_response.http_response - if response.status_code not in [201]: + if response.status_code not in [200]: if _stream: try: await response.read() # Load the body in memory and close the socket @@ -4727,39 +4905,24 @@ async def create(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwar if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.RedTeam, response.json()) + deserialized = _deserialize(_models.Index, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - -class EvaluationRulesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.aio.AIProjectClient`'s - :attr:`evaluation_rules` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace_async - async def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: - """Get an evaluation rule. + async def delete(self, name: str, version: str, **kwargs: Any) -> None: + """Delete the specific version of the Index. The service returns 204 No Content if the Index was + deleted successfully or if the Index does not exist. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationRule + :param name: The name of the resource. Required. + :type name: str + :param version: The version of the Index to delete. Required. + :type version: str + :return: None + :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4773,10 +4936,11 @@ async def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.EvaluationRule] = kwargs.pop("cls", None) + cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_evaluation_rules_get_request( - id=id, + _request = build_indexes_delete_request( + name=name, + version=version, api_version=self._config.api_version, headers=_headers, params=_params, @@ -4786,45 +4950,107 @@ async def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = kwargs.pop("stream", False) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [200]: - if _stream: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass + if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) + if cls: + return cls(pipeline_response, None, {}) # type: ignore - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.EvaluationRule, response.json()) + @overload + async def create_or_update( + self, + name: str, + version: str, + index: _models.Index, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.Index: + """Create a new or update an existing Index with the given version id. - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or update. Required. + :type version: str + :param index: The Index to create or update. Required. + :type index: ~azure.ai.projects.models.Index + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ - return deserialized # type: ignore + @overload + async def create_or_update( + self, name: str, version: str, index: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _models.Index: + """Create a new or update an existing Index with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or update. Required. + :type version: str + :param index: The Index to create or update. Required. + :type index: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + name: str, + version: str, + index: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.Index: + """Create a new or update an existing Index with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or update. Required. + :type version: str + :param index: The Index to create or update. Required. + :type index: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async - async def delete(self, id: str, **kwargs: Any) -> None: - """Delete an evaluation rule. + async def create_or_update( + self, name: str, version: str, index: Union[_models.Index, JSON, IO[bytes]], **kwargs: Any + ) -> _models.Index: + """Create a new or update an existing Index with the given version id. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :return: None - :rtype: None + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or update. Required. + :type version: str + :param index: The Index to create or update. Is one of the following types: Index, JSON, + IO[bytes] Required. + :type index: ~azure.ai.projects.models.Index or JSON or IO[bytes] + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Index :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4835,14 +5061,25 @@ async def delete(self, id: str, **kwargs: Any) -> None: } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - cls: ClsType[None] = kwargs.pop("cls", None) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Index] = kwargs.pop("cls", None) - _request = build_evaluation_rules_delete_request( - id=id, + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(index, (IOBase, bytes)): + _content = index + else: + _content = json.dumps(index, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_indexes_create_or_update_request( + name=name, + version=version, + content_type=content_type, api_version=self._config.api_version, + content=_content, headers=_headers, params=_params, ) @@ -4851,92 +5088,143 @@ async def delete(self, id: str, **kwargs: Any) -> None: } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [204]: + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Index, response.json()) if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class InsightsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.aio.AIProjectClient`'s + :attr:`insights` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload - async def create_or_update( - self, id: str, evaluation_rule: _models.EvaluationRule, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationRule: - """Create or update an evaluation rule. + async def generate( + self, + insight: _models.Insight, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Insight: + """Generate Insights. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :param evaluation_rule: Evaluation rule resource. Required. - :type evaluation_rule: ~azure.ai.projects.models.EvaluationRule + :param insight: Complete evaluation configuration including data source, evaluators, and result + settings. Required. + :type insight: ~azure.ai.projects.models.Insight + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. INSIGHTS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.INSIGHTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". - :paramtype content_type: str - :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationRule + :paramtype content_type: str + :return: Insight. The Insight is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Insight :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def create_or_update( - self, id: str, evaluation_rule: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationRule: - """Create or update an evaluation rule. + async def generate( + self, + insight: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Insight: + """Generate Insights. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :param evaluation_rule: Evaluation rule resource. Required. - :type evaluation_rule: JSON + :param insight: Complete evaluation configuration including data source, evaluators, and result + settings. Required. + :type insight: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. INSIGHTS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.INSIGHTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationRule + :return: Insight. The Insight is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Insight :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def create_or_update( - self, id: str, evaluation_rule: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationRule: - """Create or update an evaluation rule. + async def generate( + self, + insight: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Insight: + """Generate Insights. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :param evaluation_rule: Evaluation rule resource. Required. - :type evaluation_rule: IO[bytes] + :param insight: Complete evaluation configuration including data source, evaluators, and result + settings. Required. + :type insight: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. INSIGHTS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.INSIGHTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationRule + :return: Insight. The Insight is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Insight :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace_async - async def create_or_update( - self, id: str, evaluation_rule: Union[_models.EvaluationRule, JSON, IO[bytes]], **kwargs: Any - ) -> _models.EvaluationRule: - """Create or update an evaluation rule. + async def generate( + self, + insight: Union[_models.Insight, JSON, IO[bytes]], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + **kwargs: Any + ) -> _models.Insight: + """Generate Insights. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :param evaluation_rule: Evaluation rule resource. Is one of the following types: - EvaluationRule, JSON, IO[bytes] Required. - :type evaluation_rule: ~azure.ai.projects.models.EvaluationRule or JSON or IO[bytes] - :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationRule + :param insight: Complete evaluation configuration including data source, evaluators, and result + settings. Is one of the following types: Insight, JSON, IO[bytes] Required. + :type insight: ~azure.ai.projects.models.Insight or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. INSIGHTS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.INSIGHTS_V1_PREVIEW + :return: Insight. The Insight is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Insight :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4951,17 +5239,17 @@ async def create_or_update( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EvaluationRule] = kwargs.pop("cls", None) + cls: ClsType[_models.Insight] = kwargs.pop("cls", None) content_type = content_type or "application/json" _content = None - if isinstance(evaluation_rule, (IOBase, bytes)): - _content = evaluation_rule + if isinstance(insight, (IOBase, bytes)): + _content = insight else: - _content = json.dumps(evaluation_rule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(insight, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_evaluation_rules_create_or_update_request( - id=id, + _request = build_insights_generate_request( + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -4980,7 +5268,7 @@ async def create_or_update( response = pipeline_response.http_response - if response.status_code not in [200, 201]: + if response.status_code not in [201]: if _stream: try: await response.read() # Load the body in memory and close the socket @@ -4992,138 +5280,24 @@ async def create_or_update( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluationRule, response.json()) + deserialized = _deserialize(_models.Insight, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - @distributed_trace - def list( - self, - *, - action_type: Optional[Union[str, _models.EvaluationRuleActionType]] = None, - agent_name: Optional[str] = None, - enabled: Optional[bool] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.EvaluationRule"]: - """List all evaluation rules. - - :keyword action_type: Filter by the type of evaluation rule. Known values are: - "continuousEvaluation" and "humanEvaluation". Default value is None. - :paramtype action_type: str or ~azure.ai.projects.models.EvaluationRuleActionType - :keyword agent_name: Filter by the agent name. Default value is None. - :paramtype agent_name: str - :keyword enabled: Filter by the enabled status. Default value is None. - :paramtype enabled: bool - :return: An iterator like instance of EvaluationRule - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.EvaluationRule] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.EvaluationRule]] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_evaluation_rules_list_request( - action_type=action_type, - agent_name=agent_name, - enabled=enabled, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - return _request - - async def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluationRule], deserialized.get("value", [])) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - -class EvaluationTaxonomiesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.aio.AIProjectClient`'s - :attr:`evaluation_taxonomies` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace_async - async def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: - """Get an evaluation run by name. + async def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kwargs: Any) -> _models.Insight: + """Get a specific insight by Id. - :param name: The name of the resource. Required. - :type name: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :param id: The unique identifier for the insights report. Required. + :type id: str + :keyword include_coordinates: Whether to include coordinates for visualization in the response. + Defaults to false. Default value is None. + :paramtype include_coordinates: bool + :return: Insight. The Insight is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Insight :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5137,10 +5311,11 @@ async def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) + cls: ClsType[_models.Insight] = kwargs.pop("cls", None) - _request = build_evaluation_taxonomies_get_request( - name=name, + _request = build_insights_get_request( + id=id, + include_coordinates=include_coordinates, api_version=self._config.api_version, headers=_headers, params=_params, @@ -5174,7 +5349,7 @@ async def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) + deserialized = _deserialize(_models.Insight, response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -5183,22 +5358,37 @@ async def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: @distributed_trace def list( - self, *, input_name: Optional[str] = None, input_type: Optional[str] = None, **kwargs: Any - ) -> AsyncItemPaged["_models.EvaluationTaxonomy"]: - """List evaluation taxonomies. + self, + *, + type: Optional[Union[str, _models.InsightType]] = None, + eval_id: Optional[str] = None, + run_id: Optional[str] = None, + agent_name: Optional[str] = None, + include_coordinates: Optional[bool] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.Insight"]: + """List all insights in reverse chronological order (newest first). - :keyword input_name: Filter by the evaluation input name. Default value is None. - :paramtype input_name: str - :keyword input_type: Filter by taxonomy input type. Default value is None. - :paramtype input_type: str - :return: An iterator like instance of EvaluationTaxonomy - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.EvaluationTaxonomy] + :keyword type: Filter by the type of analysis. Known values are: "EvaluationRunClusterInsight", + "AgentClusterInsight", and "EvaluationComparison". Default value is None. + :paramtype type: str or ~azure.ai.projects.models.InsightType + :keyword eval_id: Filter by the evaluation ID. Default value is None. + :paramtype eval_id: str + :keyword run_id: Filter by the evaluation run ID. Default value is None. + :paramtype run_id: str + :keyword agent_name: Filter by the agent name. Default value is None. + :paramtype agent_name: str + :keyword include_coordinates: Whether to include coordinates for visualization in the response. + Defaults to false. Default value is None. + :paramtype include_coordinates: bool + :return: An iterator like instance of Insight + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.Insight] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.EvaluationTaxonomy]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.Insight]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -5211,9 +5401,12 @@ def list( def prepare_request(next_link=None): if not next_link: - _request = build_evaluation_taxonomies_list_request( - input_name=input_name, - input_type=input_type, + _request = build_insights_list_request( + type=type, + eval_id=eval_id, + run_id=run_id, + agent_name=agent_name, + include_coordinates=include_coordinates, api_version=self._config.api_version, headers=_headers, params=_params, @@ -5249,7 +5442,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluationTaxonomy], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.Insight], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -5271,126 +5464,136 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - @distributed_trace_async - async def delete(self, name: str, **kwargs: Any) -> None: - """Delete an evaluation taxonomy by name. - - :param name: The name of the resource. Required. - :type name: str - :return: None - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_evaluation_taxonomies_delete_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) +class MemoryStoresOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) + Instead, you should access the following operations through + :class:`~azure.ai.projects.aio.AIProjectClient`'s + :attr:`memory_stores` attribute. + """ - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload async def create( - self, name: str, body: _models.EvaluationTaxonomy, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Create an evaluation taxonomy. + self, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + name: str, + definition: _models.MemoryStoreDefinition, + content_type: str = "application/json", + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Create a memory store. - :param name: The name of the evaluation taxonomy. Required. - :type name: str - :param body: The evaluation taxonomy. Required. - :type body: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword name: The name of the memory store. Required. + :paramtype name: str + :keyword definition: The memory store definition. Required. + :paramtype definition: ~azure.ai.projects.models.MemoryStoreDefinition :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword description: A human-readable description of the memory store. Default value is None. + :paramtype description: str + :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default + value is None. + :paramtype metadata: dict[str, str] + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @overload async def create( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Create an evaluation taxonomy. + self, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Create a memory store. - :param name: The name of the evaluation taxonomy. Required. - :type name: str - :param body: The evaluation taxonomy. Required. + :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @overload async def create( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Create an evaluation taxonomy. + self, + body: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Create a memory store. - :param name: The name of the evaluation taxonomy. Required. - :type name: str - :param body: The evaluation taxonomy. Required. + :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace_async async def create( - self, name: str, body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Create an evaluation taxonomy. + self, + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + name: str = _Unset, + definition: _models.MemoryStoreDefinition = _Unset, + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Create a memory store. - :param name: The name of the evaluation taxonomy. Required. - :type name: str - :param body: The evaluation taxonomy. Is one of the following types: EvaluationTaxonomy, JSON, - IO[bytes] Required. - :type body: ~azure.ai.projects.models.EvaluationTaxonomy or JSON or IO[bytes] - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword name: The name of the memory store. Required. + :paramtype name: str + :keyword definition: The memory store definition. Required. + :paramtype definition: ~azure.ai.projects.models.MemoryStoreDefinition + :keyword description: A human-readable description of the memory store. Default value is None. + :paramtype description: str + :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default + value is None. + :paramtype metadata: dict[str, str] + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5405,8 +5608,15 @@ async def create( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) + cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) + if body is _Unset: + if name is _Unset: + raise TypeError("missing required argument: name") + if definition is _Unset: + raise TypeError("missing required argument: definition") + body = {"definition": definition, "description": description, "metadata": metadata, "name": name} + body = {k: v for k, v in body.items() if v is not None} content_type = content_type or "application/json" _content = None if isinstance(body, (IOBase, bytes)): @@ -5414,8 +5624,8 @@ async def create( else: _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_evaluation_taxonomies_create_request( - name=name, + _request = build_memory_stores_create_request( + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -5434,19 +5644,23 @@ async def create( response = pipeline_response.http_response - if response.status_code not in [200, 201]: + if response.status_code not in [200]: if _stream: try: await response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) + deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -5455,71 +5669,116 @@ async def create( @overload async def update( - self, name: str, body: _models.EvaluationTaxonomy, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Update an evaluation taxonomy. + self, + name: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Update a memory store. - :param name: The name of the evaluation taxonomy. Required. + :param name: The name of the memory store to update. Required. :type name: str - :param body: The evaluation taxonomy. Required. - :type body: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword description: A human-readable description of the memory store. Default value is None. + :paramtype description: str + :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default + value is None. + :paramtype metadata: dict[str, str] + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @overload async def update( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Update an evaluation taxonomy. + self, + name: str, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Update a memory store. - :param name: The name of the evaluation taxonomy. Required. + :param name: The name of the memory store to update. Required. :type name: str - :param body: The evaluation taxonomy. Required. + :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @overload async def update( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Update an evaluation taxonomy. + self, + name: str, + body: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Update a memory store. - :param name: The name of the evaluation taxonomy. Required. + :param name: The name of the memory store to update. Required. :type name: str - :param body: The evaluation taxonomy. Required. + :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace_async async def update( - self, name: str, body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Update an evaluation taxonomy. + self, + name: str, + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Update a memory store. - :param name: The name of the evaluation taxonomy. Required. + :param name: The name of the memory store to update. Required. :type name: str - :param body: The evaluation taxonomy. Is one of the following types: EvaluationTaxonomy, JSON, - IO[bytes] Required. - :type body: ~azure.ai.projects.models.EvaluationTaxonomy or JSON or IO[bytes] - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword description: A human-readable description of the memory store. Default value is None. + :paramtype description: str + :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default + value is None. + :paramtype metadata: dict[str, str] + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5534,8 +5793,11 @@ async def update( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) + cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) + if body is _Unset: + body = {"description": description, "metadata": metadata} + body = {k: v for k, v in body.items() if v is not None} content_type = content_type or "application/json" _content = None if isinstance(body, (IOBase, bytes)): @@ -5543,8 +5805,9 @@ async def update( else: _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_evaluation_taxonomies_update_request( + _request = build_memory_stores_update_request( name=name, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -5570,65 +5833,37 @@ async def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) + deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + @distributed_trace_async + async def get( + self, name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Retrieve a memory store. -class EvaluatorsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.aio.AIProjectClient`'s - :attr:`evaluators` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list_versions( - self, - name: str, - *, - type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, - limit: Optional[int] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.EvaluatorVersion"]: - """List all versions of the given evaluator. - - :param name: The name of the resource. Required. + :param name: The name of the memory store to retrieve. Required. :type name: str - :keyword type: Filter evaluators by type. Possible values: 'all', 'custom', 'builtin'. Is one - of the following types: Literal["builtin"], Literal["custom"], Literal["all"], str Default - value is None. - :paramtype type: str or str or str or str - :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and - 100, and the default is 20. Default value is None. - :paramtype limit: int - :return: An iterator like instance of EvaluatorVersion - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.EvaluatorVersion] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.EvaluatorVersion]] = kwargs.pop("cls", None) - error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -5637,152 +5872,126 @@ def list_versions( } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: - - _request = build_evaluators_list_versions_request( - name=name, - type=type, - limit=limit, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) - return _request + _request = build_memory_stores_get_request( + name=name, + foundry_features=foundry_features, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - async def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluatorVersion], deserialized.get("value", [])) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - async def get_next(next_link=None): - _request = prepare_request(next_link) + response = pipeline_response.http_response - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, ) - response = pipeline_response.http_response + raise HttpResponseError(response=response, model=error) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) - return pipeline_response + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore - return AsyncItemPaged(get_next, extract_data) + return deserialized # type: ignore @distributed_trace - def list_latest_versions( + def list( self, *, - type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], limit: Optional[int] = None, + order: Optional[Union[str, _models.PageOrder]] = None, + before: Optional[str] = None, **kwargs: Any - ) -> AsyncItemPaged["_models.EvaluatorVersion"]: - """List the latest version of each evaluator. + ) -> AsyncItemPaged["_models.MemoryStoreDetails"]: + """List all memory stores. - :keyword type: Filter evaluators by type. Possible values: 'all', 'custom', 'builtin'. Is one - of the following types: Literal["builtin"], Literal["custom"], Literal["all"], str Default - value is None. - :paramtype type: str or str or str or str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and - 100, and the default is 20. Default value is None. + 100, and the + default is 20. Default value is None. :paramtype limit: int - :return: An iterator like instance of EvaluatorVersion - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.EvaluatorVersion] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.EvaluatorVersion]] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_evaluators_list_latest_versions_request( - type=type, - limit=limit, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + :keyword order: Sort order by the ``created_at`` timestamp of the objects. ``asc`` for + ascending order and``desc`` + for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.PageOrder + :keyword before: A cursor for use in pagination. ``before`` is an object ID that defines your + place in the list. + For instance, if you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include before=obj_foo in order to fetch the previous page of the list. + Default value is None. + :paramtype before: str + :return: An iterator like instance of MemoryStoreDetails + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.MemoryStoreDetails] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.MemoryStoreDetails]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(_continuation_token=None): + _request = build_memory_stores_list_request( + foundry_features=foundry_features, + limit=limit, + order=order, + after=_continuation_token, + before=before, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) return _request async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluatorVersion], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.MemoryStoreDetails], deserialized.get("data", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + return deserialized.get("last_id") or None, AsyncList(list_of_elem) - async def get_next(next_link=None): - _request = prepare_request(next_link) + async def get_next(_continuation_token=None): + _request = prepare_request(_continuation_token) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access @@ -5792,23 +6001,29 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged(get_next, extract_data) @distributed_trace_async - async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.EvaluatorVersion: - """Get the specific version of the EvaluatorVersion. The service returns 404 Not Found error if - the EvaluatorVersion does not exist. + async def delete( + self, name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any + ) -> _models.DeleteMemoryStoreResult: + """Delete a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store to delete. Required. :type name: str - :param version: The specific version id of the EvaluatorVersion to retrieve. Required. - :type version: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :return: DeleteMemoryStoreResult. The DeleteMemoryStoreResult is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DeleteMemoryStoreResult :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5822,11 +6037,11 @@ async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.E _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) + cls: ClsType[_models.DeleteMemoryStoreResult] = kwargs.pop("cls", None) - _request = build_evaluators_get_version_request( + _request = build_memory_stores_delete_request( name=name, - version=version, + foundry_features=foundry_features, api_version=self._config.api_version, headers=_headers, params=_params, @@ -5850,29 +6065,148 @@ async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.E except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluatorVersion, response.json()) + deserialized = _deserialize(_models.DeleteMemoryStoreResult, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + @overload + async def search_memories( + self, + name: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str, + content_type: str = "application/json", + items: Optional[List[_models.InputItem]] = None, + previous_search_id: Optional[str] = None, + options: Optional[_models.MemorySearchOptions] = None, + **kwargs: Any + ) -> _models.MemoryStoreSearchResult: + """Search for relevant memories from a memory store based on conversation context. + + :param name: The name of the memory store to search. Required. + :type name: str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :paramtype scope: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword items: Items for which to search for relevant memories. Default value is None. + :paramtype items: list[~azure.ai.projects.models.InputItem] + :keyword previous_search_id: The unique ID of the previous search request, enabling incremental + memory search from where the last operation left off. Default value is None. + :paramtype previous_search_id: str + :keyword options: Memory search options. Default value is None. + :paramtype options: ~azure.ai.projects.models.MemorySearchOptions + :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def search_memories( + self, + name: str, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreSearchResult: + """Search for relevant memories from a memory store based on conversation context. + + :param name: The name of the memory store to search. Required. + :type name: str + :param body: Required. + :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def search_memories( + self, + name: str, + body: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreSearchResult: + """Search for relevant memories from a memory store based on conversation context. + + :param name: The name of the memory store to search. Required. + :type name: str + :param body: Required. + :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + @distributed_trace_async - async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: - """Delete the specific version of the EvaluatorVersion. The service returns 204 No Content if the - EvaluatorVersion was deleted successfully or if the EvaluatorVersion does not exist. + async def search_memories( + self, + name: str, + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str = _Unset, + items: Optional[List[_models.InputItem]] = None, + previous_search_id: Optional[str] = None, + options: Optional[_models.MemorySearchOptions] = None, + **kwargs: Any + ) -> _models.MemoryStoreSearchResult: + """Search for relevant memories from a memory store based on conversation context. - :param name: The name of the resource. Required. + :param name: The name of the memory store to search. Required. :type name: str - :param version: The version of the EvaluatorVersion to delete. Required. - :type version: str - :return: None - :rtype: None + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :paramtype scope: str + :keyword items: Items for which to search for relevant memories. Default value is None. + :paramtype items: list[~azure.ai.projects.models.InputItem] + :keyword previous_search_id: The unique ID of the previous search request, enabling incremental + memory search from where the last operation left off. Default value is None. + :paramtype previous_search_id: str + :keyword options: Memory search options. Default value is None. + :paramtype options: ~azure.ai.projects.models.MemorySearchOptions + :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5883,15 +6217,122 @@ async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.MemoryStoreSearchResult] = kwargs.pop("cls", None) + + if body is _Unset: + if scope is _Unset: + raise TypeError("missing required argument: scope") + body = { + "items": items, + "options": options, + "previous_search_id": previous_search_id, + "scope": scope, + } + body = {k: v for k, v in body.items() if v is not None} + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_memory_stores_search_memories_request( + name=name, + foundry_features=foundry_features, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.MemoryStoreSearchResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _update_memories_initial( + self, + name: str, + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str = _Unset, + items: Optional[List[_models.InputItem]] = None, + previous_update_id: Optional[str] = None, + update_delay: Optional[int] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - cls: ClsType[None] = kwargs.pop("cls", None) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - _request = build_evaluators_delete_version_request( + if body is _Unset: + if scope is _Unset: + raise TypeError("missing required argument: scope") + body = { + "items": items, + "previous_update_id": previous_update_id, + "scope": scope, + "update_delay": update_delay, + } + body = {k: v for k, v in body.items() if v is not None} + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_memory_stores_update_memories_request( name=name, - version=version, + foundry_features=foundry_features, + content_type=content_type, api_version=self._config.api_version, + content=_content, headers=_headers, params=_params, ) @@ -5900,386 +6341,283 @@ async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [204]: + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @overload - async def create_version( + async def _begin_update_memories( self, name: str, - evaluator_version: _models.EvaluatorVersion, *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str, content_type: str = "application/json", + items: Optional[List[_models.InputItem]] = None, + previous_update_id: Optional[str] = None, + update_delay: Optional[int] = None, **kwargs: Any - ) -> _models.EvaluatorVersion: - """Create a new EvaluatorVersion with auto incremented version id. - - :param name: The name of the resource. Required. - :type name: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion - :raises ~azure.core.exceptions.HttpResponseError: - """ - + ) -> AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]: ... @overload - async def create_version( - self, name: str, evaluator_version: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluatorVersion: - """Create a new EvaluatorVersion with auto incremented version id. - - :param name: The name of the resource. Required. - :type name: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion - :raises ~azure.core.exceptions.HttpResponseError: - """ - + async def _begin_update_memories( + self, + name: str, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]: ... @overload - async def create_version( - self, name: str, evaluator_version: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluatorVersion: - """Create a new EvaluatorVersion with auto incremented version id. - - :param name: The name of the resource. Required. - :type name: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion - :raises ~azure.core.exceptions.HttpResponseError: - """ + async def _begin_update_memories( + self, + name: str, + body: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]: ... @distributed_trace_async - async def create_version( - self, name: str, evaluator_version: Union[_models.EvaluatorVersion, JSON, IO[bytes]], **kwargs: Any - ) -> _models.EvaluatorVersion: - """Create a new EvaluatorVersion with auto incremented version id. + async def _begin_update_memories( + self, + name: str, + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str = _Unset, + items: Optional[List[_models.InputItem]] = None, + previous_update_id: Optional[str] = None, + update_delay: Optional[int] = None, + **kwargs: Any + ) -> AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]: + """Update memory store with conversation memories. - :param name: The name of the resource. Required. + :param name: The name of the memory store to update. Required. :type name: str - :param evaluator_version: Evaluator resource. Is one of the following types: EvaluatorVersion, - JSON, IO[bytes] Required. - :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion or JSON or IO[bytes] - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :paramtype scope: str + :keyword items: Conversation items from which to extract memories. Default value is None. + :paramtype items: list[~azure.ai.projects.models.InputItem] + :keyword previous_update_id: The unique ID of the previous update request, enabling incremental + memory updates from where the last operation left off. Default value is None. + :paramtype previous_update_id: str + :keyword update_delay: Timeout period before processing the memory update in seconds. + If a new update request is received during this period, it will cancel the current request and + reset the timeout. + Set to 0 to immediately trigger the update without delay. + Defaults to 300 (5 minutes). Default value is None. + :paramtype update_delay: int + :return: An instance of AsyncLROPoller that returns MemoryStoreUpdateCompletedResult. The + MemoryStoreUpdateCompletedResult is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.ai.projects.models.MemoryStoreUpdateCompletedResult] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) + cls: ClsType[_models.MemoryStoreUpdateCompletedResult] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_memories_initial( + name=name, + body=body, + foundry_features=foundry_features, + scope=scope, + items=items, + previous_update_id=previous_update_id, + update_delay=update_delay, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) - content_type = content_type or "application/json" - _content = None - if isinstance(evaluator_version, (IOBase, bytes)): - _content = evaluator_version - else: - _content = json.dumps(evaluator_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers["Operation-Location"] = self._deserialize( + "str", response.headers.get("Operation-Location") + ) + + deserialized = _deserialize(_models.MemoryStoreUpdateCompletedResult, response.json().get("result", {})) + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + return deserialized - _request = build_evaluators_create_version_request( - name=name, - content_type=content_type, - api_version=self._config.api_version, - content=_content, - headers=_headers, - params=_params, - ) path_format_arguments = { "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - if _stream: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if _stream: - deserialized = response.iter_bytes() + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: - deserialized = _deserialize(_models.EvaluatorVersion, response.json()) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @overload - async def update_version( + async def delete_scope( self, name: str, - version: str, - evaluator_version: _models.EvaluatorVersion, *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluatorVersion: - """Update an existing EvaluatorVersion with the given version id. + ) -> _models.MemoryStoreDeleteScopeResult: + """Delete all memories associated with a specific scope from a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store. Required. :type name: str - :param version: The version of the EvaluatorVersion to update. Required. - :type version: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword scope: The namespace that logically groups and isolates memories to delete, such as a + user ID. Required. + :paramtype scope: str :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with + MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def update_version( - self, name: str, version: str, evaluator_version: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluatorVersion: - """Update an existing EvaluatorVersion with the given version id. + async def delete_scope( + self, + name: str, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreDeleteScopeResult: + """Delete all memories associated with a specific scope from a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store. Required. :type name: str - :param version: The version of the EvaluatorVersion to update. Required. - :type version: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: JSON + :param body: Required. + :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with + MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def update_version( + async def delete_scope( self, name: str, - version: str, - evaluator_version: IO[bytes], + body: IO[bytes], *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluatorVersion: - """Update an existing EvaluatorVersion with the given version id. + ) -> _models.MemoryStoreDeleteScopeResult: + """Delete all memories associated with a specific scope from a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store. Required. :type name: str - :param version: The version of the EvaluatorVersion to update. Required. - :type version: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: IO[bytes] + :param body: Required. + :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with + MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace_async - async def update_version( + async def delete_scope( self, name: str, - version: str, - evaluator_version: Union[_models.EvaluatorVersion, JSON, IO[bytes]], + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str = _Unset, **kwargs: Any - ) -> _models.EvaluatorVersion: - """Update an existing EvaluatorVersion with the given version id. + ) -> _models.MemoryStoreDeleteScopeResult: + """Delete all memories associated with a specific scope from a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store. Required. :type name: str - :param version: The version of the EvaluatorVersion to update. Required. - :type version: str - :param evaluator_version: Evaluator resource. Is one of the following types: EvaluatorVersion, - JSON, IO[bytes] Required. - :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion or JSON or IO[bytes] - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _content = None - if isinstance(evaluator_version, (IOBase, bytes)): - _content = evaluator_version - else: - _content = json.dumps(evaluator_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - - _request = build_evaluators_update_version_request( - name=name, - version=version, - content_type=content_type, - api_version=self._config.api_version, - content=_content, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - if _stream: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.EvaluatorVersion, response.json()) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - -class InsightsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.aio.AIProjectClient`'s - :attr:`insights` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - async def generate( - self, insight: _models.Insight, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.Insight: - """Generate Insights. - - :param insight: Complete evaluation configuration including data source, evaluators, and result - settings. Required. - :type insight: ~azure.ai.projects.models.Insight - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: Insight. The Insight is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Insight - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def generate( - self, insight: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.Insight: - """Generate Insights. - - :param insight: Complete evaluation configuration including data source, evaluators, and result - settings. Required. - :type insight: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: Insight. The Insight is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Insight - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def generate( - self, insight: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.Insight: - """Generate Insights. - - :param insight: Complete evaluation configuration including data source, evaluators, and result - settings. Required. - :type insight: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: Insight. The Insight is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Insight - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def generate(self, insight: Union[_models.Insight, JSON, IO[bytes]], **kwargs: Any) -> _models.Insight: - """Generate Insights. - - :param insight: Complete evaluation configuration including data source, evaluators, and result - settings. Is one of the following types: Insight, JSON, IO[bytes] Required. - :type insight: ~azure.ai.projects.models.Insight or JSON or IO[bytes] - :return: Insight. The Insight is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Insight + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword scope: The namespace that logically groups and isolates memories to delete, such as a + user ID. Required. + :paramtype scope: str + :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with + MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -6294,16 +6632,23 @@ async def generate(self, insight: Union[_models.Insight, JSON, IO[bytes]], **kwa _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.Insight] = kwargs.pop("cls", None) + cls: ClsType[_models.MemoryStoreDeleteScopeResult] = kwargs.pop("cls", None) + if body is _Unset: + if scope is _Unset: + raise TypeError("missing required argument: scope") + body = {"scope": scope} + body = {k: v for k, v in body.items() if v is not None} content_type = content_type or "application/json" _content = None - if isinstance(insight, (IOBase, bytes)): - _content = insight + if isinstance(body, (IOBase, bytes)): + _content = body else: - _content = json.dumps(insight, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_insights_generate_request( + _request = build_memory_stores_delete_scope_request( + name=name, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -6322,36 +6667,55 @@ async def generate(self, insight: Union[_models.Insight, JSON, IO[bytes]], **kwa response = pipeline_response.http_response - if response.status_code not in [201]: + if response.status_code not in [200]: if _stream: try: await response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Insight, response.json()) + deserialized = _deserialize(_models.MemoryStoreDeleteScopeResult, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + +class RedTeamsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.aio.AIProjectClient`'s + :attr:`red_teams` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace_async - async def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kwargs: Any) -> _models.Insight: - """Get a specific insight by Id. + async def get(self, name: str, **kwargs: Any) -> _models.RedTeam: + """Get a redteam by name. - :param id: The unique identifier for the insights report. Required. - :type id: str - :keyword include_coordinates: Whether to include coordinates for visualization in the response. - Defaults to false. Default value is None. - :paramtype include_coordinates: bool - :return: Insight. The Insight is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Insight + :param name: Identifier of the red team run. Required. + :type name: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.RedTeam :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -6365,11 +6729,10 @@ async def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kw _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.Insight] = kwargs.pop("cls", None) + cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) - _request = build_insights_get_request( - id=id, - include_coordinates=include_coordinates, + _request = build_red_teams_get_request( + name=name, api_version=self._config.api_version, headers=_headers, params=_params, @@ -6403,7 +6766,7 @@ async def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kw if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Insight, response.json()) + deserialized = _deserialize(_models.RedTeam, response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -6411,38 +6774,17 @@ async def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kw return deserialized # type: ignore @distributed_trace - def list( - self, - *, - type: Optional[Union[str, _models.InsightType]] = None, - eval_id: Optional[str] = None, - run_id: Optional[str] = None, - agent_name: Optional[str] = None, - include_coordinates: Optional[bool] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.Insight"]: - """List all insights in reverse chronological order (newest first). + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.RedTeam"]: + """List a redteam by name. - :keyword type: Filter by the type of analysis. Known values are: "EvaluationRunClusterInsight", - "AgentClusterInsight", and "EvaluationComparison". Default value is None. - :paramtype type: str or ~azure.ai.projects.models.InsightType - :keyword eval_id: Filter by the evaluation ID. Default value is None. - :paramtype eval_id: str - :keyword run_id: Filter by the evaluation run ID. Default value is None. - :paramtype run_id: str - :keyword agent_name: Filter by the agent name. Default value is None. - :paramtype agent_name: str - :keyword include_coordinates: Whether to include coordinates for visualization in the response. - Defaults to false. Default value is None. - :paramtype include_coordinates: bool - :return: An iterator like instance of Insight - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.Insight] + :return: An iterator like instance of RedTeam + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.models.RedTeam] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.Insight]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.RedTeam]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -6455,12 +6797,7 @@ def list( def prepare_request(next_link=None): if not next_link: - _request = build_insights_list_request( - type=type, - eval_id=eval_id, - run_id=run_id, - agent_name=agent_name, - include_coordinates=include_coordinates, + _request = build_red_teams_list_request( api_version=self._config.api_version, headers=_headers, params=_params, @@ -6496,7 +6833,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Insight], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.RedTeam], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -6518,6 +6855,162 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) + @overload + async def create( + self, + red_team: _models.RedTeam, + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.RED_TEAMS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: ~azure.ai.projects.models.RedTeam + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. RED_TEAMS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.RED_TEAMS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + red_team: JSON, + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.RED_TEAMS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. RED_TEAMS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.RED_TEAMS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + red_team: IO[bytes], + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.RED_TEAMS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. RED_TEAMS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.RED_TEAMS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create( + self, + red_team: Union[_models.RedTeam, JSON, IO[bytes]], + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.RED_TEAMS_V1_PREVIEW]] = None, + **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Is one of the following types: RedTeam, JSON, IO[bytes] + Required. + :type red_team: ~azure.ai.projects.models.RedTeam or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. RED_TEAMS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.RED_TEAMS_V1_PREVIEW + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(red_team, (IOBase, bytes)): + _content = red_team + else: + _content = json.dumps(red_team, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_red_teams_create_request( + foundry_features=foundry_features, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.RedTeam, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + class SchedulesOperations: """ @@ -6738,16 +7231,16 @@ async def get_next(next_link=None): @overload async def create_or_update( - self, id: str, schedule: _models.Schedule, *, content_type: str = "application/json", **kwargs: Any + self, id: str, schedule: _models.Schedule, *, content_type: str = "application/merge-patch+json", **kwargs: Any ) -> _models.Schedule: - """Create or update a schedule by id. + """Create or update operation template. :param id: Identifier of the schedule. Required. :type id: str - :param schedule: Schedule resource. Required. + :param schedule: The resource instance. Required. :type schedule: ~azure.ai.projects.models.Schedule :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str :return: Schedule. The Schedule is compatible with MutableMapping :rtype: ~azure.ai.projects.models.Schedule @@ -6756,16 +7249,16 @@ async def create_or_update( @overload async def create_or_update( - self, id: str, schedule: JSON, *, content_type: str = "application/json", **kwargs: Any + self, id: str, schedule: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any ) -> _models.Schedule: - """Create or update a schedule by id. + """Create or update operation template. :param id: Identifier of the schedule. Required. :type id: str - :param schedule: Schedule resource. Required. + :param schedule: The resource instance. Required. :type schedule: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str :return: Schedule. The Schedule is compatible with MutableMapping :rtype: ~azure.ai.projects.models.Schedule @@ -6774,16 +7267,16 @@ async def create_or_update( @overload async def create_or_update( - self, id: str, schedule: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + self, id: str, schedule: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any ) -> _models.Schedule: - """Create or update a schedule by id. + """Create or update operation template. :param id: Identifier of the schedule. Required. :type id: str - :param schedule: Schedule resource. Required. + :param schedule: The resource instance. Required. :type schedule: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str :return: Schedule. The Schedule is compatible with MutableMapping :rtype: ~azure.ai.projects.models.Schedule @@ -6794,12 +7287,12 @@ async def create_or_update( async def create_or_update( self, id: str, schedule: Union[_models.Schedule, JSON, IO[bytes]], **kwargs: Any ) -> _models.Schedule: - """Create or update a schedule by id. + """Create or update operation template. :param id: Identifier of the schedule. Required. :type id: str - :param schedule: Schedule resource. Is one of the following types: Schedule, JSON, IO[bytes] - Required. + :param schedule: The resource instance. Is one of the following types: Schedule, JSON, + IO[bytes] Required. :type schedule: ~azure.ai.projects.models.Schedule or JSON or IO[bytes] :return: Schedule. The Schedule is compatible with MutableMapping :rtype: ~azure.ai.projects.models.Schedule @@ -6819,7 +7312,7 @@ async def create_or_update( content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Schedule] = kwargs.pop("cls", None) - content_type = content_type or "application/json" + content_type = content_type or "application/merge-patch+json" _content = None if isinstance(schedule, (IOBase, bytes)): _content = schedule @@ -6855,24 +7348,39 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + if _stream: deserialized = response.iter_bytes() else: deserialized = _deserialize(_models.Schedule, response.json()) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore @distributed_trace_async - async def get_run(self, schedule_id: str, run_id: str, **kwargs: Any) -> _models.ScheduleRun: + async def get_run( + self, + schedule_id: str, + run_id: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + **kwargs: Any + ) -> _models.ScheduleRun: """Get a schedule run by id. - :param schedule_id: Identifier of the schedule. Required. + :param schedule_id: The unique identifier of the schedule. Required. :type schedule_id: str - :param run_id: Identifier of the schedule run. Required. + :param run_id: The unique identifier of the schedule run. Required. :type run_id: str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. INSIGHTS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.INSIGHTS_V1_PREVIEW :return: ScheduleRun. The ScheduleRun is compatible with MutableMapping :rtype: ~azure.ai.projects.models.ScheduleRun :raises ~azure.core.exceptions.HttpResponseError: @@ -6893,6 +7401,7 @@ async def get_run(self, schedule_id: str, run_id: str, **kwargs: Any) -> _models _request = build_schedules_get_run_request( schedule_id=schedule_id, run_id=run_id, + foundry_features=foundry_features, api_version=self._config.api_version, headers=_headers, params=_params, @@ -6916,7 +7425,11 @@ async def get_run(self, schedule_id: str, run_id: str, **kwargs: Any) -> _models except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py index 4f1343541de5..564645f1e796 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py @@ -7,12 +7,13 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import Union, Optional, Any, List, overload, IO, cast +from typing import Union, Optional, Any, List, overload, IO, cast, Literal from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.polling import AsyncNoPolling from azure.core.utils import case_insensitive_dict from ... import models as _models from ...models import ( + FoundryFeaturesOptInKeys, MemoryStoreOperationUsage, ResponseUsageInputTokensDetails, ResponseUsageOutputTokensDetails, @@ -32,6 +33,7 @@ async def begin_update_memories( self, name: str, *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], scope: str, content_type: str = "application/json", items: Optional[List[_models.InputItem]] = None, @@ -43,6 +45,12 @@ async def begin_update_memories( :param name: The name of the memory store to update. Required. :type name: str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. Required. :paramtype scope: str @@ -69,7 +77,13 @@ async def begin_update_memories( @overload async def begin_update_memories( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + self, + name: str, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any, ) -> AsyncUpdateMemoriesLROPoller: """Update memory store with conversation memories. @@ -77,6 +91,9 @@ async def begin_update_memories( :type name: str :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -89,7 +106,13 @@ async def begin_update_memories( @overload async def begin_update_memories( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + self, + name: str, + body: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any, ) -> AsyncUpdateMemoriesLROPoller: """Update memory store with conversation memories. @@ -97,6 +120,9 @@ async def begin_update_memories( :type name: str :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -109,15 +135,16 @@ async def begin_update_memories( @distributed_trace_async @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], + method_added_on="v1", + params_added_on={"v1": ["api_version", "name", "content_type", "accept"]}, + api_versions_list=["v1"], ) async def begin_update_memories( self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], scope: str = _Unset, items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, @@ -130,6 +157,9 @@ async def begin_update_memories( :type name: str :param body: Is either a JSON type or a IO[bytes] type. Required. :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. Required. :paramtype scope: str @@ -160,6 +190,7 @@ async def begin_update_memories( cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._update_memories_initial( + foundry_features=foundry_features, name=name, body=body, scope=scope, diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py index 2537d1bd8040..40698214e0f0 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py @@ -16,30 +16,25 @@ from ._models import ( # type: ignore A2APreviewTool, AISearchIndexResource, + AgentClusterInsightRequest, AgentClusterInsightResult, - AgentClusterInsightsRequest, AgentDefinition, AgentDetails, - AgentId, AgentObjectVersions, - AgentReference, AgentTaxonomyInput, AgentVersionDetails, - AgenticIdentityCredentials, + AgenticIdentityPreviewCredentials, Annotation, ApiErrorResponse, ApiKeyCredentials, - ApplyPatchCreateFileOperation, ApplyPatchCreateFileOperationParam, - ApplyPatchDeleteFileOperation, ApplyPatchDeleteFileOperationParam, - ApplyPatchFileOperation, ApplyPatchOperationParam, ApplyPatchToolParam, - ApplyPatchUpdateFileOperation, ApplyPatchUpdateFileOperationParam, ApproximateLocation, AzureAIAgentTarget, + AzureAIModelTarget, AzureAISearchIndex, AzureAISearchTool, AzureAISearchToolResource, @@ -83,7 +78,6 @@ ContainerFileCitationBody, ContinuousEvaluationRuleAction, CosmosDBIndex, - CreatedBy, CronTrigger, CustomCredential, CustomGrammarFormatParam, @@ -104,18 +98,18 @@ EmbeddingConfiguration, EntraIDCredentials, Error, - EvalCompareReport, EvalResult, EvalRunResultCompareItem, EvalRunResultComparison, EvalRunResultSummary, - EvaluationComparisonRequest, + EvaluationComparisonInsightRequest, + EvaluationComparisonInsightResult, EvaluationResultSample, EvaluationRule, EvaluationRuleAction, EvaluationRuleFilter, + EvaluationRunClusterInsightRequest, EvaluationRunClusterInsightResult, - EvaluationRunClusterInsightsRequest, EvaluationScheduleTask, EvaluationTaxonomy, EvaluationTaxonomyInput, @@ -134,15 +128,10 @@ FunctionAndCustomToolCallOutputInputFileContent, FunctionAndCustomToolCallOutputInputImageContent, FunctionAndCustomToolCallOutputInputTextContent, - FunctionShellAction, FunctionShellActionParam, - FunctionShellCallOutputContent, FunctionShellCallOutputContentParam, - FunctionShellCallOutputExitOutcome, FunctionShellCallOutputExitOutcomeParam, - FunctionShellCallOutputOutcome, FunctionShellCallOutputOutcomeParam, - FunctionShellCallOutputTimeoutOutcome, FunctionShellCallOutputTimeoutOutcomeParam, FunctionShellToolParam, FunctionTool, @@ -150,7 +139,6 @@ HourlyRecurrenceSchedule, HumanEvaluationRuleAction, HybridSearchOptions, - ImageBasedHostedAgentDefinition, ImageGenTool, ImageGenToolInputImageMask, Index, @@ -184,7 +172,6 @@ InputItemOutputMessage, InputItemReasoningItem, InputItemWebSearchToolCall, - InputMessageResource, InputTextContentParam, Insight, InsightCluster, @@ -196,26 +183,6 @@ InsightSummary, InsightsMetadata, ItemReferenceParam, - ItemResource, - ItemResourceApplyPatchToolCall, - ItemResourceApplyPatchToolCallOutput, - ItemResourceCodeInterpreterToolCall, - ItemResourceComputerToolCall, - ItemResourceComputerToolCallOutputResource, - ItemResourceFileSearchToolCall, - ItemResourceFunctionShellCall, - ItemResourceFunctionShellCallOutput, - ItemResourceFunctionToolCallOutputResource, - ItemResourceFunctionToolCallResource, - ItemResourceImageGenToolCall, - ItemResourceLocalShellToolCall, - ItemResourceLocalShellToolCallOutput, - ItemResourceMcpApprovalRequest, - ItemResourceMcpApprovalResponseResource, - ItemResourceMcpListTools, - ItemResourceMcpToolCall, - ItemResourceOutputMessage, - ItemResourceWebSearchToolCall, KeyPressAction, LocalShellExecAction, LocalShellToolParam, @@ -232,7 +199,6 @@ MemorySearchItem, MemorySearchOptions, MemorySearchPreviewTool, - MemorySearchToolCallItemResource, MemoryStoreDefaultDefinition, MemoryStoreDefaultOptions, MemoryStoreDefinition, @@ -245,10 +211,10 @@ MicrosoftFabricPreviewTool, ModelDeployment, ModelDeploymentSku, + ModelSamplingParams, MonthlyRecurrenceSchedule, Move, NoAuthenticationCredentials, - OAuthConsentRequestItemResource, OneTimeTrigger, OpenApiAnonymousAuthDetails, OpenApiAuthDetails, @@ -259,14 +225,13 @@ OpenApiProjectConnectionAuthDetails, OpenApiProjectConnectionSecurityScheme, OpenApiTool, - OutputContent, OutputMessageContent, OutputMessageContentOutputTextContent, OutputMessageContentRefusalContent, PendingUploadRequest, PendingUploadResponse, PromptAgentDefinition, - PromptAgentDefinitionText, + PromptAgentDefinitionTextOptions, PromptBasedEvaluatorDefinition, ProtocolVersionRecord, RaiConfig, @@ -286,9 +251,10 @@ Scroll, SharepointGroundingToolParameters, SharepointPreviewTool, + SpecificApplyPatchParam, + SpecificFunctionShellParam, StructuredInputDefinition, StructuredOutputDefinition, - StructuredOutputsItemResource, Summary, Target, TargetConfig, @@ -299,6 +265,17 @@ TextResponseFormatConfigurationResponseFormatText, TextResponseFormatJsonSchema, Tool, + ToolChoiceAllowed, + ToolChoiceCodeInterpreter, + ToolChoiceComputerUsePreview, + ToolChoiceCustom, + ToolChoiceFileSearch, + ToolChoiceFunction, + ToolChoiceImageGeneration, + ToolChoiceMCP, + ToolChoiceParam, + ToolChoiceWebSearchPreview, + ToolChoiceWebSearchPreview20250311, ToolDescription, ToolProjectConnection, TopLogProb, @@ -318,19 +295,16 @@ WebSearchTool, WebSearchToolFilters, WeeklyRecurrenceSchedule, - WorkflowActionOutputItemResource, WorkflowAgentDefinition, ) from ._enums import ( # type: ignore AgentKind, + AgentObjectType, AgentProtocol, AnnotationType, - ApplyPatchCallOutputStatus, ApplyPatchCallOutputStatusParam, - ApplyPatchCallStatus, ApplyPatchCallStatusParam, - ApplyPatchFileOperationType, ApplyPatchOperationParamType, AttackStrategy, AzureAISearchQueryType, @@ -338,7 +312,6 @@ ComputerActionType, ComputerEnvironment, ConnectionType, - ContainerLogKind, ContainerMemoryLimit, CredentialType, CustomToolParamFormatType, @@ -354,11 +327,11 @@ EvaluatorMetricDirection, EvaluatorMetricType, EvaluatorType, + FoundryFeaturesOptInKeys, FunctionAndCustomToolCallOutputType, FunctionCallItemStatus, FunctionShellCallItemStatus, FunctionShellCallOutputOutcomeParamType, - FunctionShellCallOutputOutcomeType, GrammarSyntax1, ImageDetail, IndexType, @@ -366,16 +339,14 @@ InputFidelity, InputItemType, InsightType, - ItemResourceType, - LocalShellCallStatus, MCPToolCallStatus, MemoryItemKind, MemoryOperationKind, MemoryStoreKind, + MemoryStoreObjectType, MemoryStoreUpdateStatus, OpenApiAuthType, OperationState, - OutputContentType, OutputMessageContentType, PageOrder, PendingUploadType, @@ -387,6 +358,7 @@ ScheduleTaskType, SearchContextSize, TextResponseFormatConfigurationType, + ToolChoiceParamType, ToolType, TreatmentEffectType, TriggerType, @@ -398,30 +370,25 @@ __all__ = [ "A2APreviewTool", "AISearchIndexResource", + "AgentClusterInsightRequest", "AgentClusterInsightResult", - "AgentClusterInsightsRequest", "AgentDefinition", "AgentDetails", - "AgentId", "AgentObjectVersions", - "AgentReference", "AgentTaxonomyInput", "AgentVersionDetails", - "AgenticIdentityCredentials", + "AgenticIdentityPreviewCredentials", "Annotation", "ApiErrorResponse", "ApiKeyCredentials", - "ApplyPatchCreateFileOperation", "ApplyPatchCreateFileOperationParam", - "ApplyPatchDeleteFileOperation", "ApplyPatchDeleteFileOperationParam", - "ApplyPatchFileOperation", "ApplyPatchOperationParam", "ApplyPatchToolParam", - "ApplyPatchUpdateFileOperation", "ApplyPatchUpdateFileOperationParam", "ApproximateLocation", "AzureAIAgentTarget", + "AzureAIModelTarget", "AzureAISearchIndex", "AzureAISearchTool", "AzureAISearchToolResource", @@ -465,7 +432,6 @@ "ContainerFileCitationBody", "ContinuousEvaluationRuleAction", "CosmosDBIndex", - "CreatedBy", "CronTrigger", "CustomCredential", "CustomGrammarFormatParam", @@ -486,18 +452,18 @@ "EmbeddingConfiguration", "EntraIDCredentials", "Error", - "EvalCompareReport", "EvalResult", "EvalRunResultCompareItem", "EvalRunResultComparison", "EvalRunResultSummary", - "EvaluationComparisonRequest", + "EvaluationComparisonInsightRequest", + "EvaluationComparisonInsightResult", "EvaluationResultSample", "EvaluationRule", "EvaluationRuleAction", "EvaluationRuleFilter", + "EvaluationRunClusterInsightRequest", "EvaluationRunClusterInsightResult", - "EvaluationRunClusterInsightsRequest", "EvaluationScheduleTask", "EvaluationTaxonomy", "EvaluationTaxonomyInput", @@ -516,15 +482,10 @@ "FunctionAndCustomToolCallOutputInputFileContent", "FunctionAndCustomToolCallOutputInputImageContent", "FunctionAndCustomToolCallOutputInputTextContent", - "FunctionShellAction", "FunctionShellActionParam", - "FunctionShellCallOutputContent", "FunctionShellCallOutputContentParam", - "FunctionShellCallOutputExitOutcome", "FunctionShellCallOutputExitOutcomeParam", - "FunctionShellCallOutputOutcome", "FunctionShellCallOutputOutcomeParam", - "FunctionShellCallOutputTimeoutOutcome", "FunctionShellCallOutputTimeoutOutcomeParam", "FunctionShellToolParam", "FunctionTool", @@ -532,7 +493,6 @@ "HourlyRecurrenceSchedule", "HumanEvaluationRuleAction", "HybridSearchOptions", - "ImageBasedHostedAgentDefinition", "ImageGenTool", "ImageGenToolInputImageMask", "Index", @@ -566,7 +526,6 @@ "InputItemOutputMessage", "InputItemReasoningItem", "InputItemWebSearchToolCall", - "InputMessageResource", "InputTextContentParam", "Insight", "InsightCluster", @@ -578,26 +537,6 @@ "InsightSummary", "InsightsMetadata", "ItemReferenceParam", - "ItemResource", - "ItemResourceApplyPatchToolCall", - "ItemResourceApplyPatchToolCallOutput", - "ItemResourceCodeInterpreterToolCall", - "ItemResourceComputerToolCall", - "ItemResourceComputerToolCallOutputResource", - "ItemResourceFileSearchToolCall", - "ItemResourceFunctionShellCall", - "ItemResourceFunctionShellCallOutput", - "ItemResourceFunctionToolCallOutputResource", - "ItemResourceFunctionToolCallResource", - "ItemResourceImageGenToolCall", - "ItemResourceLocalShellToolCall", - "ItemResourceLocalShellToolCallOutput", - "ItemResourceMcpApprovalRequest", - "ItemResourceMcpApprovalResponseResource", - "ItemResourceMcpListTools", - "ItemResourceMcpToolCall", - "ItemResourceOutputMessage", - "ItemResourceWebSearchToolCall", "KeyPressAction", "LocalShellExecAction", "LocalShellToolParam", @@ -614,7 +553,6 @@ "MemorySearchItem", "MemorySearchOptions", "MemorySearchPreviewTool", - "MemorySearchToolCallItemResource", "MemoryStoreDefaultDefinition", "MemoryStoreDefaultOptions", "MemoryStoreDefinition", @@ -627,10 +565,10 @@ "MicrosoftFabricPreviewTool", "ModelDeployment", "ModelDeploymentSku", + "ModelSamplingParams", "MonthlyRecurrenceSchedule", "Move", "NoAuthenticationCredentials", - "OAuthConsentRequestItemResource", "OneTimeTrigger", "OpenApiAnonymousAuthDetails", "OpenApiAuthDetails", @@ -641,14 +579,13 @@ "OpenApiProjectConnectionAuthDetails", "OpenApiProjectConnectionSecurityScheme", "OpenApiTool", - "OutputContent", "OutputMessageContent", "OutputMessageContentOutputTextContent", "OutputMessageContentRefusalContent", "PendingUploadRequest", "PendingUploadResponse", "PromptAgentDefinition", - "PromptAgentDefinitionText", + "PromptAgentDefinitionTextOptions", "PromptBasedEvaluatorDefinition", "ProtocolVersionRecord", "RaiConfig", @@ -668,9 +605,10 @@ "Scroll", "SharepointGroundingToolParameters", "SharepointPreviewTool", + "SpecificApplyPatchParam", + "SpecificFunctionShellParam", "StructuredInputDefinition", "StructuredOutputDefinition", - "StructuredOutputsItemResource", "Summary", "Target", "TargetConfig", @@ -681,6 +619,17 @@ "TextResponseFormatConfigurationResponseFormatText", "TextResponseFormatJsonSchema", "Tool", + "ToolChoiceAllowed", + "ToolChoiceCodeInterpreter", + "ToolChoiceComputerUsePreview", + "ToolChoiceCustom", + "ToolChoiceFileSearch", + "ToolChoiceFunction", + "ToolChoiceImageGeneration", + "ToolChoiceMCP", + "ToolChoiceParam", + "ToolChoiceWebSearchPreview", + "ToolChoiceWebSearchPreview20250311", "ToolDescription", "ToolProjectConnection", "TopLogProb", @@ -700,16 +649,13 @@ "WebSearchTool", "WebSearchToolFilters", "WeeklyRecurrenceSchedule", - "WorkflowActionOutputItemResource", "WorkflowAgentDefinition", "AgentKind", + "AgentObjectType", "AgentProtocol", "AnnotationType", - "ApplyPatchCallOutputStatus", "ApplyPatchCallOutputStatusParam", - "ApplyPatchCallStatus", "ApplyPatchCallStatusParam", - "ApplyPatchFileOperationType", "ApplyPatchOperationParamType", "AttackStrategy", "AzureAISearchQueryType", @@ -717,7 +663,6 @@ "ComputerActionType", "ComputerEnvironment", "ConnectionType", - "ContainerLogKind", "ContainerMemoryLimit", "CredentialType", "CustomToolParamFormatType", @@ -733,11 +678,11 @@ "EvaluatorMetricDirection", "EvaluatorMetricType", "EvaluatorType", + "FoundryFeaturesOptInKeys", "FunctionAndCustomToolCallOutputType", "FunctionCallItemStatus", "FunctionShellCallItemStatus", "FunctionShellCallOutputOutcomeParamType", - "FunctionShellCallOutputOutcomeType", "GrammarSyntax1", "ImageDetail", "IndexType", @@ -745,16 +690,14 @@ "InputFidelity", "InputItemType", "InsightType", - "ItemResourceType", - "LocalShellCallStatus", "MCPToolCallStatus", "MemoryItemKind", "MemoryOperationKind", "MemoryStoreKind", + "MemoryStoreObjectType", "MemoryStoreUpdateStatus", "OpenApiAuthType", "OperationState", - "OutputContentType", "OutputMessageContentType", "PageOrder", "PendingUploadType", @@ -766,6 +709,7 @@ "ScheduleTaskType", "SearchContextSize", "TextResponseFormatConfigurationType", + "ToolChoiceParamType", "ToolType", "TreatmentEffectType", "TriggerType", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py index 145ae30df168..15c6e9031732 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py @@ -14,69 +14,79 @@ class AgentKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of AgentKind.""" PROMPT = "prompt" + """PROMPT.""" HOSTED = "hosted" + """HOSTED.""" CONTAINER_APP = "container_app" + """CONTAINER_APP.""" WORKFLOW = "workflow" + """WORKFLOW.""" + + +class AgentObjectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of AgentObjectType.""" + + AGENT = "agent" + """AGENT.""" + AGENT_VERSION = "agent.version" + """AGENT_VERSION.""" + AGENT_DELETED = "agent.deleted" + """AGENT_DELETED.""" + AGENT_VERSION_DELETED = "agent.version.deleted" + """AGENT_VERSION_DELETED.""" + AGENT_CONTAINER = "agent.container" + """AGENT_CONTAINER.""" class AgentProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of AgentProtocol.""" ACTIVITY_PROTOCOL = "activity_protocol" + """ACTIVITY_PROTOCOL.""" RESPONSES = "responses" + """RESPONSES.""" class AnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of AnnotationType.""" FILE_CITATION = "file_citation" + """FILE_CITATION.""" URL_CITATION = "url_citation" + """URL_CITATION.""" CONTAINER_FILE_CITATION = "container_file_citation" + """CONTAINER_FILE_CITATION.""" FILE_PATH = "file_path" - - -class ApplyPatchCallOutputStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ApplyPatchCallOutputStatus.""" - - COMPLETED = "completed" - FAILED = "failed" + """FILE_PATH.""" class ApplyPatchCallOutputStatusParam(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Apply patch call output status.""" COMPLETED = "completed" + """COMPLETED.""" FAILED = "failed" - - -class ApplyPatchCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ApplyPatchCallStatus.""" - - IN_PROGRESS = "in_progress" - COMPLETED = "completed" + """FAILED.""" class ApplyPatchCallStatusParam(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Apply patch call status.""" IN_PROGRESS = "in_progress" + """IN_PROGRESS.""" COMPLETED = "completed" - - -class ApplyPatchFileOperationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ApplyPatchFileOperationType.""" - - CREATE_FILE = "create_file" - DELETE_FILE = "delete_file" - UPDATE_FILE = "update_file" + """COMPLETED.""" class ApplyPatchOperationParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of ApplyPatchOperationParamType.""" CREATE_FILE = "create_file" + """CREATE_FILE.""" DELETE_FILE = "delete_file" + """DELETE_FILE.""" UPDATE_FILE = "update_file" + """UPDATE_FILE.""" class AttackStrategy(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -154,116 +164,132 @@ class AzureAISearchQueryType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Available query types for Azure AI Search tool.""" SIMPLE = "simple" - """Query type ``simple``""" + """Query type ``simple``.""" SEMANTIC = "semantic" - """Query type ``semantic``""" + """Query type ``semantic``.""" VECTOR = "vector" - """Query type ``vector``""" + """Query type ``vector``.""" VECTOR_SIMPLE_HYBRID = "vector_simple_hybrid" - """Query type ``vector_simple_hybrid``""" + """Query type ``vector_simple_hybrid``.""" VECTOR_SEMANTIC_HYBRID = "vector_semantic_hybrid" - """Query type ``vector_semantic_hybrid``""" + """Query type ``vector_semantic_hybrid``.""" class ClickButtonType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of ClickButtonType.""" LEFT = "left" + """LEFT.""" RIGHT = "right" + """RIGHT.""" WHEEL = "wheel" + """WHEEL.""" BACK = "back" + """BACK.""" FORWARD = "forward" + """FORWARD.""" class ComputerActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of ComputerActionType.""" CLICK = "click" + """CLICK.""" DOUBLE_CLICK = "double_click" + """DOUBLE_CLICK.""" DRAG = "drag" + """DRAG.""" KEYPRESS = "keypress" + """KEYPRESS.""" MOVE = "move" + """MOVE.""" SCREENSHOT = "screenshot" + """SCREENSHOT.""" SCROLL = "scroll" + """SCROLL.""" TYPE = "type" + """TYPE.""" WAIT = "wait" + """WAIT.""" class ComputerEnvironment(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of ComputerEnvironment.""" WINDOWS = "windows" + """WINDOWS.""" MAC = "mac" + """MAC.""" LINUX = "linux" + """LINUX.""" UBUNTU = "ubuntu" + """UBUNTU.""" BROWSER = "browser" + """BROWSER.""" class ConnectionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The Type (or category) of the connection.""" AZURE_OPEN_AI = "AzureOpenAI" - """Azure OpenAI Service""" + """Azure OpenAI Service.""" AZURE_BLOB_STORAGE = "AzureBlob" - """Azure Blob Storage, with specified container""" + """Azure Blob Storage, with specified container.""" AZURE_STORAGE_ACCOUNT = "AzureStorageAccount" - """Azure Blob Storage, with container not specified (used by Agents)""" + """Azure Blob Storage, with container not specified (used by Agents).""" AZURE_AI_SEARCH = "CognitiveSearch" - """Azure AI Search""" + """Azure AI Search.""" COSMOS_DB = "CosmosDB" - """CosmosDB""" + """CosmosDB.""" API_KEY = "ApiKey" - """Generic connection that uses API Key authentication""" + """Generic connection that uses API Key authentication.""" APPLICATION_CONFIGURATION = "AppConfig" - """Application Configuration""" + """Application Configuration.""" APPLICATION_INSIGHTS = "AppInsights" - """Application Insights""" + """Application Insights.""" CUSTOM = "CustomKeys" - """Custom Keys""" - REMOTE_TOOL = "RemoteTool" - """Remote tool""" - - -class ContainerLogKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of logs to stream from a container.""" - - CONSOLE = "console" - """Console logs from the container.""" - SYSTEM = "system" - """System logs from the container.""" + """Custom Keys.""" + REMOTE_TOOL = "RemoteTool_Preview" + """Remote tool.""" class ContainerMemoryLimit(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of ContainerMemoryLimit.""" ENUM_1_G = "1g" + """1_G.""" ENUM_4_G = "4g" + """4_G.""" ENUM_16_G = "16g" + """16_G.""" ENUM_64_G = "64g" + """64_G.""" class CredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The credential type used by the connection.""" API_KEY = "ApiKey" - """API Key credential""" + """API Key credential.""" ENTRA_ID = "AAD" - """Entra ID credential (formerly known as AAD)""" + """Entra ID credential (formerly known as AAD).""" SAS = "SAS" - """Shared Access Signature (SAS) credential""" + """Shared Access Signature (SAS) credential.""" CUSTOM = "CustomKeys" - """Custom credential""" + """Custom credential.""" NONE = "None" - """No credential""" - AGENTIC_IDENTITY = "AgenticIdentityToken" - """Agentic identity credential""" + """No credential.""" + AGENTIC_IDENTITY_PREVIEW = "AgenticIdentityToken_Preview" + """Agentic identity credential.""" class CustomToolParamFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of CustomToolParamFormatType.""" TEXT = "text" + """TEXT.""" GRAMMAR = "grammar" + """GRAMMAR.""" class DatasetType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -298,15 +324,18 @@ class DeploymentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of DeploymentType.""" MODEL_DEPLOYMENT = "ModelDeployment" - """Model deployment""" + """Model deployment.""" class DetailEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of DetailEnum.""" LOW = "low" + """LOW.""" HIGH = "high" + """HIGH.""" AUTO = "auto" + """AUTO.""" class EvaluationRuleActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -331,7 +360,7 @@ class EvaluationTaxonomyInputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of the evaluation taxonomy input.""" AGENT = "agent" - """Agent""" + """Agent.""" POLICY = "policy" """Policy.""" @@ -340,26 +369,26 @@ class EvaluatorCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The category of the evaluator.""" QUALITY = "quality" - """Quality""" + """Quality.""" SAFETY = "safety" - """Risk & Safety""" + """Risk & Safety.""" AGENTS = "agents" - """Agents""" + """Agents.""" class EvaluatorDefinitionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The type of evaluator definition.""" PROMPT = "prompt" - """Prompt-based definition""" + """Prompt-based definition.""" CODE = "code" - """Code-based definition""" + """Code-based definition.""" PROMPT_AND_CODE = "prompt_and_code" - """Prompt & Code Based definition""" + """Prompt & Code Based definition.""" SERVICE = "service" - """Service-based evaluator""" + """Service-based evaluator.""" OPENAI_GRADERS = "openai_graders" - """OpenAI graders""" + """OpenAI graders.""" class EvaluatorMetricDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -368,11 +397,11 @@ class EvaluatorMetricDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ INCREASE = "increase" - """It indicates a higher value is better for this metric""" + """It indicates a higher value is better for this metric.""" DECREASE = "decrease" - """It indicates a lower value is better for this metric""" + """It indicates a lower value is better for this metric.""" NEUTRAL = "neutral" - """It indicates no preference for this metric direction""" + """It indicates no preference for this metric direction.""" class EvaluatorMetricType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -383,88 +412,119 @@ class EvaluatorMetricType(str, Enum, metaclass=CaseInsensitiveEnumMeta): CONTINUOUS = "continuous" """Continuous metric representing values in a continuous range.""" BOOLEAN = "boolean" - """Boolean metric representing true/false values""" + """Boolean metric representing true/false values.""" class EvaluatorType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The type of the evaluator.""" BUILT_IN = "builtin" - """Built-in evaluator (Microsoft provided)""" + """Built-in evaluator (Microsoft provided).""" CUSTOM = "custom" - """Custom evaluator""" + """Custom evaluator.""" + + +class FoundryFeaturesOptInKeys(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of FoundryFeaturesOptInKeys.""" + + CONTAINER_AGENTS_V1_PREVIEW = "ContainerAgents=V1Preview" + """CONTAINER_AGENTS_V1_PREVIEW.""" + HOSTED_AGENTS_V1_PREVIEW = "HostedAgents=V1Preview" + """HOSTED_AGENTS_V1_PREVIEW.""" + WORKFLOW_AGENTS_V1_PREVIEW = "WorkflowAgents=V1Preview" + """WORKFLOW_AGENTS_V1_PREVIEW.""" + EVALUATIONS_V1_PREVIEW = "Evaluations=V1Preview" + """EVALUATIONS_V1_PREVIEW.""" + RED_TEAMS_V1_PREVIEW = "RedTeams=V1Preview" + """RED_TEAMS_V1_PREVIEW.""" + INSIGHTS_V1_PREVIEW = "Insights=V1Preview" + """INSIGHTS_V1_PREVIEW.""" + MEMORY_STORES_V1_PREVIEW = "MemoryStores=V1Preview" + """MEMORY_STORES_V1_PREVIEW.""" class FunctionAndCustomToolCallOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of FunctionAndCustomToolCallOutputType.""" INPUT_TEXT = "input_text" + """INPUT_TEXT.""" INPUT_IMAGE = "input_image" + """INPUT_IMAGE.""" INPUT_FILE = "input_file" + """INPUT_FILE.""" class FunctionCallItemStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of FunctionCallItemStatus.""" IN_PROGRESS = "in_progress" + """IN_PROGRESS.""" COMPLETED = "completed" + """COMPLETED.""" INCOMPLETE = "incomplete" + """INCOMPLETE.""" class FunctionShellCallItemStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Shell call status.""" IN_PROGRESS = "in_progress" + """IN_PROGRESS.""" COMPLETED = "completed" + """COMPLETED.""" INCOMPLETE = "incomplete" + """INCOMPLETE.""" class FunctionShellCallOutputOutcomeParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of FunctionShellCallOutputOutcomeParamType.""" TIMEOUT = "timeout" + """TIMEOUT.""" EXIT = "exit" - - -class FunctionShellCallOutputOutcomeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of FunctionShellCallOutputOutcomeType.""" - - TIMEOUT = "timeout" - EXIT = "exit" + """EXIT.""" class GrammarSyntax1(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of GrammarSyntax1.""" LARK = "lark" + """LARK.""" REGEX = "regex" + """REGEX.""" class ImageDetail(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of ImageDetail.""" LOW = "low" + """LOW.""" HIGH = "high" + """HIGH.""" AUTO = "auto" + """AUTO.""" class IndexType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of IndexType.""" AZURE_SEARCH = "AzureSearch" - """Azure search""" + """Azure search.""" COSMOS_DB = "CosmosDBNoSqlVectorStore" - """CosmosDB""" + """CosmosDB.""" MANAGED_AZURE_SEARCH = "ManagedAzureSearch" - """Managed Azure Search""" + """Managed Azure Search.""" class InputContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of InputContentType.""" INPUT_TEXT = "input_text" + """INPUT_TEXT.""" INPUT_IMAGE = "input_image" + """INPUT_IMAGE.""" INPUT_FILE = "input_file" + """INPUT_FILE.""" class InputFidelity(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -474,37 +534,64 @@ class InputFidelity(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ HIGH = "high" + """HIGH.""" LOW = "low" + """LOW.""" class InputItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of InputItemType.""" MESSAGE = "message" + """MESSAGE.""" OUTPUT_MESSAGE = "output_message" + """OUTPUT_MESSAGE.""" FILE_SEARCH_CALL = "file_search_call" + """FILE_SEARCH_CALL.""" COMPUTER_CALL = "computer_call" + """COMPUTER_CALL.""" COMPUTER_CALL_OUTPUT = "computer_call_output" + """COMPUTER_CALL_OUTPUT.""" WEB_SEARCH_CALL = "web_search_call" + """WEB_SEARCH_CALL.""" FUNCTION_CALL = "function_call" + """FUNCTION_CALL.""" FUNCTION_CALL_OUTPUT = "function_call_output" + """FUNCTION_CALL_OUTPUT.""" REASONING = "reasoning" + """REASONING.""" COMPACTION = "compaction" + """COMPACTION.""" IMAGE_GENERATION_CALL = "image_generation_call" + """IMAGE_GENERATION_CALL.""" CODE_INTERPRETER_CALL = "code_interpreter_call" + """CODE_INTERPRETER_CALL.""" LOCAL_SHELL_CALL = "local_shell_call" + """LOCAL_SHELL_CALL.""" LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output" + """LOCAL_SHELL_CALL_OUTPUT.""" SHELL_CALL = "shell_call" + """SHELL_CALL.""" SHELL_CALL_OUTPUT = "shell_call_output" + """SHELL_CALL_OUTPUT.""" APPLY_PATCH_CALL = "apply_patch_call" + """APPLY_PATCH_CALL.""" APPLY_PATCH_CALL_OUTPUT = "apply_patch_call_output" + """APPLY_PATCH_CALL_OUTPUT.""" MCP_LIST_TOOLS = "mcp_list_tools" + """MCP_LIST_TOOLS.""" MCP_APPROVAL_REQUEST = "mcp_approval_request" + """MCP_APPROVAL_REQUEST.""" MCP_APPROVAL_RESPONSE = "mcp_approval_response" + """MCP_APPROVAL_RESPONSE.""" MCP_CALL = "mcp_call" + """MCP_CALL.""" CUSTOM_TOOL_CALL_OUTPUT = "custom_tool_call_output" + """CUSTOM_TOOL_CALL_OUTPUT.""" CUSTOM_TOOL_CALL = "custom_tool_call" + """CUSTOM_TOOL_CALL.""" ITEM_REFERENCE = "item_reference" + """ITEM_REFERENCE.""" class InsightType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -518,51 +605,19 @@ class InsightType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Evaluation Comparison.""" -class ItemResourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ItemResourceType.""" - - MESSAGE = "message" - OUTPUT_MESSAGE = "output_message" - FILE_SEARCH_CALL = "file_search_call" - COMPUTER_CALL = "computer_call" - COMPUTER_CALL_OUTPUT = "computer_call_output" - WEB_SEARCH_CALL = "web_search_call" - FUNCTION_CALL = "function_call" - FUNCTION_CALL_OUTPUT = "function_call_output" - IMAGE_GENERATION_CALL = "image_generation_call" - CODE_INTERPRETER_CALL = "code_interpreter_call" - LOCAL_SHELL_CALL = "local_shell_call" - LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output" - SHELL_CALL = "shell_call" - SHELL_CALL_OUTPUT = "shell_call_output" - APPLY_PATCH_CALL = "apply_patch_call" - APPLY_PATCH_CALL_OUTPUT = "apply_patch_call_output" - MCP_LIST_TOOLS = "mcp_list_tools" - MCP_APPROVAL_REQUEST = "mcp_approval_request" - MCP_APPROVAL_RESPONSE = "mcp_approval_response" - MCP_CALL = "mcp_call" - STRUCTURED_OUTPUTS = "structured_outputs" - WORKFLOW_ACTION = "workflow_action" - MEMORY_SEARCH_CALL = "memory_search_call" - OAUTH_CONSENT_REQUEST = "oauth_consent_request" - - -class LocalShellCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of LocalShellCallStatus.""" - - IN_PROGRESS = "in_progress" - COMPLETED = "completed" - INCOMPLETE = "incomplete" - - class MCPToolCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of MCPToolCallStatus.""" IN_PROGRESS = "in_progress" + """IN_PROGRESS.""" COMPLETED = "completed" + """COMPLETED.""" INCOMPLETE = "incomplete" + """INCOMPLETE.""" CALLING = "calling" + """CALLING.""" FAILED = "failed" + """FAILED.""" class MemoryItemKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -592,14 +647,30 @@ class MemoryStoreKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The default memory store implementation.""" +class MemoryStoreObjectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of MemoryStoreObjectType.""" + + MEMORY_STORE = "memory_store" + """MEMORY_STORE.""" + MEMORY_STORE_DELETED = "memory_store.deleted" + """MEMORY_STORE_DELETED.""" + MEMORY_STORE_SCOPE_DELETED = "memory_store.scope.deleted" + """MEMORY_STORE_SCOPE_DELETED.""" + + class MemoryStoreUpdateStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Status of a memory store update operation.""" QUEUED = "queued" + """QUEUED.""" IN_PROGRESS = "in_progress" + """IN_PROGRESS.""" COMPLETED = "completed" + """COMPLETED.""" FAILED = "failed" + """FAILED.""" SUPERSEDED = "superseded" + """SUPERSEDED.""" class OpenApiAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -611,8 +682,11 @@ class OpenApiAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ ANONYMOUS = "anonymous" + """ANONYMOUS.""" PROJECT_CONNECTION = "project_connection" + """PROJECT_CONNECTION.""" MANAGED_IDENTITY = "managed_identity" + """MANAGED_IDENTITY.""" class OperationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -630,26 +704,22 @@ class OperationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The operation has been canceled by the user.""" -class OutputContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of OutputContentType.""" - - OUTPUT_TEXT = "output_text" - REFUSAL = "refusal" - REASONING_TEXT = "reasoning_text" - - class OutputMessageContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of OutputMessageContentType.""" OUTPUT_TEXT = "output_text" + """OUTPUT_TEXT.""" REFUSAL = "refusal" + """REFUSAL.""" class PageOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of PageOrder.""" ASC = "asc" + """ASC.""" DESC = "desc" + """DESC.""" class PendingUploadType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -665,7 +735,9 @@ class RankerVersionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of RankerVersionType.""" AUTO = "auto" + """AUTO.""" DEFAULT2024_11_15 = "default-2024-11-15" + """DEFAULT2024_11_15.""" class RecurrenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -741,44 +813,102 @@ class SearchContextSize(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of SearchContextSize.""" LOW = "low" + """LOW.""" MEDIUM = "medium" + """MEDIUM.""" HIGH = "high" + """HIGH.""" class TextResponseFormatConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of TextResponseFormatConfigurationType.""" TEXT = "text" + """TEXT.""" JSON_SCHEMA = "json_schema" + """JSON_SCHEMA.""" JSON_OBJECT = "json_object" + """JSON_OBJECT.""" + + +class ToolChoiceParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ToolChoiceParamType.""" + + ALLOWED_TOOLS = "allowed_tools" + """ALLOWED_TOOLS.""" + FUNCTION = "function" + """FUNCTION.""" + MCP = "mcp" + """MCP.""" + CUSTOM = "custom" + """CUSTOM.""" + APPLY_PATCH = "apply_patch" + """APPLY_PATCH.""" + SHELL = "shell" + """SHELL.""" + FILE_SEARCH = "file_search" + """FILE_SEARCH.""" + WEB_SEARCH_PREVIEW = "web_search_preview" + """WEB_SEARCH_PREVIEW.""" + COMPUTER_USE_PREVIEW = "computer_use_preview" + """COMPUTER_USE_PREVIEW.""" + WEB_SEARCH_PREVIEW2025_03_11 = "web_search_preview_2025_03_11" + """WEB_SEARCH_PREVIEW2025_03_11.""" + IMAGE_GENERATION = "image_generation" + """IMAGE_GENERATION.""" + CODE_INTERPRETER = "code_interpreter" + """CODE_INTERPRETER.""" class ToolType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of ToolType.""" FUNCTION = "function" + """FUNCTION.""" FILE_SEARCH = "file_search" + """FILE_SEARCH.""" COMPUTER_USE_PREVIEW = "computer_use_preview" + """COMPUTER_USE_PREVIEW.""" WEB_SEARCH = "web_search" + """WEB_SEARCH.""" MCP = "mcp" + """MCP.""" CODE_INTERPRETER = "code_interpreter" + """CODE_INTERPRETER.""" IMAGE_GENERATION = "image_generation" + """IMAGE_GENERATION.""" LOCAL_SHELL = "local_shell" + """LOCAL_SHELL.""" SHELL = "shell" + """SHELL.""" CUSTOM = "custom" + """CUSTOM.""" WEB_SEARCH_PREVIEW = "web_search_preview" + """WEB_SEARCH_PREVIEW.""" APPLY_PATCH = "apply_patch" + """APPLY_PATCH.""" A2A_PREVIEW = "a2a_preview" + """A2A_PREVIEW.""" BING_CUSTOM_SEARCH_PREVIEW = "bing_custom_search_preview" + """BING_CUSTOM_SEARCH_PREVIEW.""" BROWSER_AUTOMATION_PREVIEW = "browser_automation_preview" + """BROWSER_AUTOMATION_PREVIEW.""" FABRIC_DATAAGENT_PREVIEW = "fabric_dataagent_preview" + """FABRIC_DATAAGENT_PREVIEW.""" SHAREPOINT_GROUNDING_PREVIEW = "sharepoint_grounding_preview" + """SHAREPOINT_GROUNDING_PREVIEW.""" + MEMORY_SEARCH_PREVIEW = "memory_search_preview" + """MEMORY_SEARCH_PREVIEW.""" AZURE_AI_SEARCH = "azure_ai_search" + """AZURE_AI_SEARCH.""" AZURE_FUNCTION = "azure_function" + """AZURE_FUNCTION.""" BING_GROUNDING = "bing_grounding" + """BING_GROUNDING.""" CAPTURE_STRUCTURED_OUTPUTS = "capture_structured_outputs" + """CAPTURE_STRUCTURED_OUTPUTS.""" OPENAPI = "openapi" - MEMORY_SEARCH = "memory_search" + """OPENAPI.""" class TreatmentEffectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index 148f0517a776..daca23dfb31e 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -14,8 +14,8 @@ from .._utils.model_base import Model as _Model, rest_discriminator, rest_field from ._enums import ( AgentKind, + AgentObjectType, AnnotationType, - ApplyPatchFileOperationType, ApplyPatchOperationParamType, ComputerActionType, CredentialType, @@ -27,22 +27,21 @@ EvaluatorDefinitionType, FunctionAndCustomToolCallOutputType, FunctionShellCallOutputOutcomeParamType, - FunctionShellCallOutputOutcomeType, IndexType, InputContentType, InputItemType, InsightType, - ItemResourceType, MemoryItemKind, MemoryStoreKind, + MemoryStoreObjectType, OpenApiAuthType, - OutputContentType, OutputMessageContentType, PendingUploadType, RecurrenceType, SampleType, ScheduleTaskType, TextResponseFormatConfigurationType, + ToolChoiceParamType, ToolType, TriggerType, ) @@ -66,8 +65,8 @@ class Tool(_Model): "web_search", "mcp", "code_interpreter", "image_generation", "local_shell", "shell", "custom", "web_search_preview", "apply_patch", "a2a_preview", "bing_custom_search_preview", "browser_automation_preview", "fabric_dataagent_preview", "sharepoint_grounding_preview", - "azure_ai_search", "azure_function", "bing_grounding", "capture_structured_outputs", "openapi", - and "memory_search". + "memory_search_preview", "azure_ai_search", "azure_function", "bing_grounding", + "capture_structured_outputs", and "openapi". :vartype type: str or ~azure.ai.projects.models.ToolType """ @@ -77,8 +76,8 @@ class Tool(_Model): \"web_search\", \"mcp\", \"code_interpreter\", \"image_generation\", \"local_shell\", \"shell\", \"custom\", \"web_search_preview\", \"apply_patch\", \"a2a_preview\", \"bing_custom_search_preview\", \"browser_automation_preview\", \"fabric_dataagent_preview\", - \"sharepoint_grounding_preview\", \"azure_ai_search\", \"azure_function\", \"bing_grounding\", - \"capture_structured_outputs\", \"openapi\", and \"memory_search\".""" + \"sharepoint_grounding_preview\", \"memory_search_preview\", \"azure_ai_search\", + \"azure_function\", \"bing_grounding\", \"capture_structured_outputs\", and \"openapi\".""" @overload def __init__( @@ -101,30 +100,29 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class A2APreviewTool(Tool, discriminator="a2a_preview"): """An agent implementing the A2A protocol. - :ivar type: The type of the tool. Always ``"a2a_preview``. Required. + :ivar type: The type of the tool. Always ``"a2a_preview``. Required. A2A_PREVIEW. :vartype type: str or ~azure.ai.projects.models.A2A_PREVIEW :ivar base_url: Base URL of the agent. :vartype base_url: str - :ivar agent_card_path: The path to the agent card relative to the ``base_url``. - If not provided, defaults to ``/.well-known/agent-card.json``. + :ivar agent_card_path: The path to the agent card relative to the ``base_url``. If not + provided, defaults to ``/.well-known/agent-card.json``. :vartype agent_card_path: str - :ivar project_connection_id: The connection ID in the project for the A2A server. - The connection stores authentication and other connection details needed to connect to the A2A + :ivar project_connection_id: The connection ID in the project for the A2A server. The + connection stores authentication and other connection details needed to connect to the A2A server. :vartype project_connection_id: str """ type: Literal[ToolType.A2A_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the tool. Always ``\"a2a_preview``. Required.""" + """The type of the tool. Always ``\"a2a_preview``. Required. A2A_PREVIEW.""" base_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Base URL of the agent.""" agent_card_path: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The path to the agent card relative to the ``base_url``. - If not provided, defaults to ``/.well-known/agent-card.json``.""" + """The path to the agent card relative to the ``base_url``. If not provided, defaults to + ``/.well-known/agent-card.json``.""" project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The connection ID in the project for the A2A server. - The connection stores authentication and other connection details needed to connect to the A2A - server.""" + """The connection ID in the project for the A2A server. The connection stores authentication and + other connection details needed to connect to the A2A server.""" @overload def __init__( @@ -147,20 +145,21 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = ToolType.A2A_PREVIEW # type: ignore -class InsightResult(_Model): - """The result of the insights. +class InsightRequest(_Model): + """The request of the insights report. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AgentClusterInsightResult, EvalCompareReport, EvaluationRunClusterInsightResult + AgentClusterInsightRequest, EvaluationComparisonInsightRequest, + EvaluationRunClusterInsightRequest - :ivar type: The type of insights result. Required. Known values are: - "EvaluationRunClusterInsight", "AgentClusterInsight", and "EvaluationComparison". + :ivar type: The type of request. Required. Known values are: "EvaluationRunClusterInsight", + "AgentClusterInsight", and "EvaluationComparison". :vartype type: str or ~azure.ai.projects.models.InsightType """ __mapping__: dict[str, _Model] = {} type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """The type of insights result. Required. Known values are: \"EvaluationRunClusterInsight\", + """The type of request. Required. Known values are: \"EvaluationRunClusterInsight\", \"AgentClusterInsight\", and \"EvaluationComparison\".""" @overload @@ -181,27 +180,32 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AgentClusterInsightResult(InsightResult, discriminator="AgentClusterInsight"): - """Insights from the agent cluster analysis. +class AgentClusterInsightRequest(InsightRequest, discriminator="AgentClusterInsight"): + """Insights on set of Agent Evaluation Results. - :ivar type: The type of insights result. Required. Cluster Insight on an Agent. + :ivar type: The type of request. Required. Cluster Insight on an Agent. :vartype type: str or ~azure.ai.projects.models.AGENT_CLUSTER_INSIGHT - :ivar cluster_insight: Required. - :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult + :ivar agent_name: Identifier for the agent. Required. + :vartype agent_name: str + :ivar model_configuration: Configuration of the model used in the insight generation. + :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration """ type: Literal[InsightType.AGENT_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights result. Required. Cluster Insight on an Agent.""" - cluster_insight: "_models.ClusterInsightResult" = rest_field( - name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] + """The type of request. Required. Cluster Insight on an Agent.""" + agent_name: str = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) + """Identifier for the agent. Required.""" + model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( + name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] ) - """Required.""" + """Configuration of the model used in the insight generation.""" @overload def __init__( self, *, - cluster_insight: "_models.ClusterInsightResult", + agent_name: str, + model_configuration: Optional["_models.InsightModelConfiguration"] = None, ) -> None: ... @overload @@ -216,20 +220,20 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = InsightType.AGENT_CLUSTER_INSIGHT # type: ignore -class InsightRequest(_Model): - """The request of the insights report. +class InsightResult(_Model): + """The result of the insights. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AgentClusterInsightsRequest, EvaluationComparisonRequest, EvaluationRunClusterInsightsRequest + AgentClusterInsightResult, EvaluationComparisonInsightResult, EvaluationRunClusterInsightResult - :ivar type: The type of request. Required. Known values are: "EvaluationRunClusterInsight", - "AgentClusterInsight", and "EvaluationComparison". + :ivar type: The type of insights result. Required. Known values are: + "EvaluationRunClusterInsight", "AgentClusterInsight", and "EvaluationComparison". :vartype type: str or ~azure.ai.projects.models.InsightType """ __mapping__: dict[str, _Model] = {} type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """The type of request. Required. Known values are: \"EvaluationRunClusterInsight\", + """The type of insights result. Required. Known values are: \"EvaluationRunClusterInsight\", \"AgentClusterInsight\", and \"EvaluationComparison\".""" @overload @@ -250,32 +254,27 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AgentClusterInsightsRequest(InsightRequest, discriminator="AgentClusterInsight"): - """Insights on set of Agent Evaluation Results. +class AgentClusterInsightResult(InsightResult, discriminator="AgentClusterInsight"): + """Insights from the agent cluster analysis. - :ivar type: The type of request. Required. Cluster Insight on an Agent. + :ivar type: The type of insights result. Required. Cluster Insight on an Agent. :vartype type: str or ~azure.ai.projects.models.AGENT_CLUSTER_INSIGHT - :ivar agent_name: Identifier for the agent. Required. - :vartype agent_name: str - :ivar model_configuration: Configuration of the model used in the insight generation. - :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration + :ivar cluster_insight: Required. + :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult """ type: Literal[InsightType.AGENT_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of request. Required. Cluster Insight on an Agent.""" - agent_name: str = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) - """Identifier for the agent. Required.""" - model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( - name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] + """The type of insights result. Required. Cluster Insight on an Agent.""" + cluster_insight: "_models.ClusterInsightResult" = rest_field( + name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] ) - """Configuration of the model used in the insight generation.""" + """Required.""" @overload def __init__( self, *, - agent_name: str, - model_configuration: Optional["_models.InsightModelConfiguration"] = None, + cluster_insight: "_models.ClusterInsightResult", ) -> None: ... @overload @@ -331,8 +330,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class AgentDetails(_Model): """AgentDetails. - :ivar object: The object type, which is always 'agent'. Required. Default value is "agent". - :vartype object: str + :ivar object: The object type, which is always 'agent'. Required. AGENT. + :vartype object: str or ~azure.ai.projects.models.AGENT :ivar id: The unique identifier of the agent. Required. :vartype id: str :ivar name: The name of the agent. Required. @@ -341,8 +340,8 @@ class AgentDetails(_Model): :vartype versions: ~azure.ai.projects.models.AgentObjectVersions """ - object: Literal["agent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type, which is always 'agent'. Required. Default value is \"agent\".""" + object: Literal[AgentObjectType.AGENT] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type, which is always 'agent'. Required. AGENT.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique identifier of the agent. Required.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -354,6 +353,7 @@ class AgentDetails(_Model): def __init__( self, *, + object: Literal[AgentObjectType.AGENT], id: str, # pylint: disable=redefined-builtin name: str, versions: "_models.AgentObjectVersions", @@ -368,25 +368,24 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.object: Literal["agent"] = "agent" class BaseCredentials(_Model): """A base class for connection credentials. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - EntraIDCredentials, AgenticIdentityCredentials, ApiKeyCredentials, CustomCredential, + EntraIDCredentials, AgenticIdentityPreviewCredentials, ApiKeyCredentials, CustomCredential, NoAuthenticationCredentials, SASCredentials :ivar type: The type of credential used by the connection. Required. Known values are: - "ApiKey", "AAD", "SAS", "CustomKeys", "None", and "AgenticIdentityToken". + "ApiKey", "AAD", "SAS", "CustomKeys", "None", and "AgenticIdentityToken_Preview". :vartype type: str or ~azure.ai.projects.models.CredentialType """ __mapping__: dict[str, _Model] = {} type: str = rest_discriminator(name="type", visibility=["read"]) """The type of credential used by the connection. Required. Known values are: \"ApiKey\", \"AAD\", - \"SAS\", \"CustomKeys\", \"None\", and \"AgenticIdentityToken\".""" + \"SAS\", \"CustomKeys\", \"None\", and \"AgenticIdentityToken_Preview\".""" @overload def __init__( @@ -406,57 +405,19 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AgenticIdentityCredentials(BaseCredentials, discriminator="AgenticIdentityToken"): +class AgenticIdentityPreviewCredentials(BaseCredentials, discriminator="AgenticIdentityToken_Preview"): """Agentic identity credential definition. - :ivar type: The credential type. Required. Agentic identity credential - :vartype type: str or ~azure.ai.projects.models.AGENTIC_IDENTITY - """ - - type: Literal[CredentialType.AGENTIC_IDENTITY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. Agentic identity credential""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = CredentialType.AGENTIC_IDENTITY # type: ignore - - -class AgentId(_Model): - """AgentId. - - :ivar type: Required. Default value is "agent_id". - :vartype type: str - :ivar name: The name of the agent. Required. - :vartype name: str - :ivar version: The version identifier of the agent. Required. - :vartype version: str + :ivar type: The credential type. Required. Agentic identity credential. + :vartype type: str or ~azure.ai.projects.models.AGENTIC_IDENTITY_PREVIEW """ - type: Literal["agent_id"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required. Default value is \"agent_id\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the agent. Required.""" - version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The version identifier of the agent. Required.""" + type: Literal[CredentialType.AGENTIC_IDENTITY_PREVIEW] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. Agentic identity credential.""" @overload def __init__( self, - *, - name: str, - version: str, ) -> None: ... @overload @@ -468,7 +429,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type: Literal["agent_id"] = "agent_id" + self.type = CredentialType.AGENTIC_IDENTITY_PREVIEW # type: ignore class AgentObjectVersions(_Model): @@ -499,44 +460,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AgentReference(_Model): - """AgentReference. - - :ivar type: Required. Default value is "agent_reference". - :vartype type: str - :ivar name: The name of the agent. Required. - :vartype name: str - :ivar version: The version identifier of the agent. - :vartype version: str - """ - - type: Literal["agent_reference"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required. Default value is \"agent_reference\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the agent. Required.""" - version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The version identifier of the agent.""" - - @overload - def __init__( - self, - *, - name: str, - version: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type: Literal["agent_reference"] = "agent_reference" - - class EvaluationTaxonomyInput(_Model): """Input configuration for the evaluation taxonomy. @@ -573,17 +496,17 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class AgentTaxonomyInput(EvaluationTaxonomyInput, discriminator="agent"): """Input configuration for the evaluation taxonomy when the input type is agent. - :ivar type: Input type of the evaluation taxonomy. Required. Agent + :ivar type: Input type of the evaluation taxonomy. Required. Agent. :vartype type: str or ~azure.ai.projects.models.AGENT :ivar target: Target configuration for the agent. Required. - :vartype target: ~azure.ai.projects.models.AzureAIAgentTarget + :vartype target: ~azure.ai.projects.models.Target :ivar risk_categories: List of risk categories to evaluate against. Required. :vartype risk_categories: list[str or ~azure.ai.projects.models.RiskCategory] """ type: Literal[EvaluationTaxonomyInputType.AGENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Input type of the evaluation taxonomy. Required. Agent""" - target: "_models.AzureAIAgentTarget" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Input type of the evaluation taxonomy. Required. Agent.""" + target: "_models.Target" = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Target configuration for the agent. Required.""" risk_categories: list[Union[str, "_models.RiskCategory"]] = rest_field( name="riskCategories", visibility=["read", "create", "update", "delete", "query"] @@ -594,7 +517,7 @@ class AgentTaxonomyInput(EvaluationTaxonomyInput, discriminator="agent"): def __init__( self, *, - target: "_models.AzureAIAgentTarget", + target: "_models.Target", risk_categories: list[Union[str, "_models.RiskCategory"]], ) -> None: ... @@ -620,9 +543,8 @@ class AgentVersionDetails(_Model): Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters. Required. :vartype metadata: dict[str, str] - :ivar object: The object type, which is always 'agent.version'. Required. Default value is - "agent.version". - :vartype object: str + :ivar object: The object type, which is always 'agent.version'. Required. AGENT_VERSION. + :vartype object: str or ~azure.ai.projects.models.AGENT_VERSION :ivar id: The unique identifier of the agent version. Required. :vartype id: str :ivar name: The name of the agent. Name can be used to retrieve/update/delete the agent. @@ -646,8 +568,10 @@ class AgentVersionDetails(_Model): Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters. Required.""" - object: Literal["agent.version"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type, which is always 'agent.version'. Required. Default value is \"agent.version\".""" + object: Literal[AgentObjectType.AGENT_VERSION] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The object type, which is always 'agent.version'. Required. AGENT_VERSION.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique identifier of the agent version. Required.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -669,6 +593,7 @@ def __init__( self, *, metadata: dict[str, str], + object: Literal[AgentObjectType.AGENT_VERSION], id: str, # pylint: disable=redefined-builtin name: str, version: str, @@ -686,7 +611,6 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.object: Literal["agent.version"] = "agent.version" class AISearchIndexResource(_Model): @@ -813,14 +737,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class ApiKeyCredentials(BaseCredentials, discriminator="ApiKey"): """API Key Credential definition. - :ivar type: The credential type. Required. API Key credential + :ivar type: The credential type. Required. API Key credential. :vartype type: str or ~azure.ai.projects.models.API_KEY :ivar api_key: API Key. :vartype api_key: str """ type: Literal[CredentialType.API_KEY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. API Key credential""" + """The credential type. Required. API Key credential.""" api_key: Optional[str] = rest_field(name="key", visibility=["read"]) """API Key.""" @@ -841,76 +765,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = CredentialType.API_KEY # type: ignore -class ApplyPatchFileOperation(_Model): - """Apply patch operation. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ApplyPatchCreateFileOperation, ApplyPatchDeleteFileOperation, ApplyPatchUpdateFileOperation - - :ivar type: Required. Known values are: "create_file", "delete_file", and "update_file". - :vartype type: str or ~azure.ai.projects.models.ApplyPatchFileOperationType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"create_file\", \"delete_file\", and \"update_file\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ApplyPatchCreateFileOperation(ApplyPatchFileOperation, discriminator="create_file"): - """Apply patch create file operation. - - :ivar type: Create a new file with the provided diff. Required. - :vartype type: str or ~azure.ai.projects.models.CREATE_FILE - :ivar path: Path of the file to create. Required. - :vartype path: str - :ivar diff: Diff to apply. Required. - :vartype diff: str - """ - - type: Literal[ApplyPatchFileOperationType.CREATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Create a new file with the provided diff. Required.""" - path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Path of the file to create. Required.""" - diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Diff to apply. Required.""" - - @overload - def __init__( - self, - *, - path: str, - diff: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ApplyPatchFileOperationType.CREATE_FILE # type: ignore - - class ApplyPatchOperationParam(_Model): """Apply patch operation. @@ -947,7 +801,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class ApplyPatchCreateFileOperationParam(ApplyPatchOperationParam, discriminator="create_file"): """Apply patch create file operation. - :ivar type: The operation type. Always ``create_file``. Required. + :ivar type: The operation type. Always ``create_file``. Required. CREATE_FILE. :vartype type: str or ~azure.ai.projects.models.CREATE_FILE :ivar path: Path of the file to create relative to the workspace root. Required. :vartype path: str @@ -956,7 +810,7 @@ class ApplyPatchCreateFileOperationParam(ApplyPatchOperationParam, discriminator """ type: Literal[ApplyPatchOperationParamType.CREATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The operation type. Always ``create_file``. Required.""" + """The operation type. Always ``create_file``. Required. CREATE_FILE.""" path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Path of the file to create relative to the workspace root. Required.""" diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -982,50 +836,17 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = ApplyPatchOperationParamType.CREATE_FILE # type: ignore -class ApplyPatchDeleteFileOperation(ApplyPatchFileOperation, discriminator="delete_file"): - """Apply patch delete file operation. - - :ivar type: Delete the specified file. Required. - :vartype type: str or ~azure.ai.projects.models.DELETE_FILE - :ivar path: Path of the file to delete. Required. - :vartype path: str - """ - - type: Literal[ApplyPatchFileOperationType.DELETE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Delete the specified file. Required.""" - path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Path of the file to delete. Required.""" - - @overload - def __init__( - self, - *, - path: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ApplyPatchFileOperationType.DELETE_FILE # type: ignore - - class ApplyPatchDeleteFileOperationParam(ApplyPatchOperationParam, discriminator="delete_file"): """Apply patch delete file operation. - :ivar type: The operation type. Always ``delete_file``. Required. + :ivar type: The operation type. Always ``delete_file``. Required. DELETE_FILE. :vartype type: str or ~azure.ai.projects.models.DELETE_FILE :ivar path: Path of the file to delete relative to the workspace root. Required. :vartype path: str """ type: Literal[ApplyPatchOperationParamType.DELETE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The operation type. Always ``delete_file``. Required.""" + """The operation type. Always ``delete_file``. Required. DELETE_FILE.""" path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Path of the file to delete relative to the workspace root. Required.""" @@ -1051,12 +872,12 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class ApplyPatchToolParam(Tool, discriminator="apply_patch"): """Apply patch tool. - :ivar type: The type of the tool. Always ``apply_patch``. Required. + :ivar type: The type of the tool. Always ``apply_patch``. Required. APPLY_PATCH. :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH """ type: Literal[ToolType.APPLY_PATCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the tool. Always ``apply_patch``. Required.""" + """The type of the tool. Always ``apply_patch``. Required. APPLY_PATCH.""" @overload def __init__( @@ -1075,48 +896,10 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = ToolType.APPLY_PATCH # type: ignore -class ApplyPatchUpdateFileOperation(ApplyPatchFileOperation, discriminator="update_file"): - """Apply patch update file operation. - - :ivar type: Update an existing file with the provided diff. Required. - :vartype type: str or ~azure.ai.projects.models.UPDATE_FILE - :ivar path: Path of the file to update. Required. - :vartype path: str - :ivar diff: Diff to apply. Required. - :vartype diff: str - """ - - type: Literal[ApplyPatchFileOperationType.UPDATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Update an existing file with the provided diff. Required.""" - path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Path of the file to update. Required.""" - diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Diff to apply. Required.""" - - @overload - def __init__( - self, - *, - path: str, - diff: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ApplyPatchFileOperationType.UPDATE_FILE # type: ignore - - class ApplyPatchUpdateFileOperationParam(ApplyPatchOperationParam, discriminator="update_file"): """Apply patch update file operation. - :ivar type: The operation type. Always ``update_file``. Required. + :ivar type: The operation type. Always ``update_file``. Required. UPDATE_FILE. :vartype type: str or ~azure.ai.projects.models.UPDATE_FILE :ivar path: Path of the file to update relative to the workspace root. Required. :vartype path: str @@ -1125,7 +908,7 @@ class ApplyPatchUpdateFileOperationParam(ApplyPatchOperationParam, discriminator """ type: Literal[ApplyPatchOperationParamType.UPDATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The operation type. Always ``update_file``. Required.""" + """The operation type. Always ``update_file``. Required. UPDATE_FILE.""" path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Path of the file to update relative to the workspace root. Required.""" diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -1276,16 +1059,58 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = "azure_ai_agent" # type: ignore -class Index(_Model): - """Index resource Definition. +class AzureAIModelTarget(Target, discriminator="azure_ai_model"): + """Represents a target specifying an Azure AI model for operations requiring model selection. - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AzureAISearchIndex, CosmosDBIndex, ManagedAzureAISearchIndex + :ivar type: The type of target, always ``azure_ai_model``. Required. Default value is + "azure_ai_model". + :vartype type: str + :ivar model: The unique identifier of the Azure AI model. + :vartype model: str + :ivar sampling_params: The parameters used to control the sampling behavior of the model during + text generation. + :vartype sampling_params: ~azure.ai.projects.models.ModelSamplingParams + """ - :ivar type: Type of index. Required. Known values are: "AzureSearch", - "CosmosDBNoSqlVectorStore", and "ManagedAzureSearch". - :vartype type: str or ~azure.ai.projects.models.IndexType - :ivar id: Asset ID, a unique identifier for the asset. + type: Literal["azure_ai_model"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of target, always ``azure_ai_model``. Required. Default value is \"azure_ai_model\".""" + model: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the Azure AI model.""" + sampling_params: Optional["_models.ModelSamplingParams"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The parameters used to control the sampling behavior of the model during text generation.""" + + @overload + def __init__( + self, + *, + model: Optional[str] = None, + sampling_params: Optional["_models.ModelSamplingParams"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = "azure_ai_model" # type: ignore + + +class Index(_Model): + """Index resource Definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureAISearchIndex, CosmosDBIndex, ManagedAzureAISearchIndex + + :ivar type: Type of index. Required. Known values are: "AzureSearch", + "CosmosDBNoSqlVectorStore", and "ManagedAzureSearch". + :vartype type: str or ~azure.ai.projects.models.IndexType + :ivar id: Asset ID, a unique identifier for the asset. :vartype id: str :ivar name: The name of the resource. Required. :vartype name: str @@ -1345,7 +1170,7 @@ class AzureAISearchIndex(Index, discriminator="AzureSearch"): :vartype description: str :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar type: Type of index. Required. Azure search + :ivar type: Type of index. Required. Azure search. :vartype type: str or ~azure.ai.projects.models.AZURE_SEARCH :ivar connection_name: Name of connection to Azure AI Search. Required. :vartype connection_name: str @@ -1356,7 +1181,7 @@ class AzureAISearchIndex(Index, discriminator="AzureSearch"): """ type: Literal[IndexType.AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Type of index. Required. Azure search""" + """Type of index. Required. Azure search.""" connection_name: str = rest_field(name="connectionName", visibility=["create"]) """Name of connection to Azure AI Search. Required.""" index_name: str = rest_field(name="indexName", visibility=["create"]) @@ -1390,14 +1215,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class AzureAISearchTool(Tool, discriminator="azure_ai_search"): """The input definition information for an Azure AI search tool as used to configure an agent. - :ivar type: The object type, which is always 'azure_ai_search'. Required. + :ivar type: The object type, which is always 'azure_ai_search'. Required. AZURE_AI_SEARCH. :vartype type: str or ~azure.ai.projects.models.AZURE_AI_SEARCH :ivar azure_ai_search: The azure ai search index resource. Required. :vartype azure_ai_search: ~azure.ai.projects.models.AzureAISearchToolResource """ type: Literal[ToolType.AZURE_AI_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'azure_ai_search'. Required.""" + """The object type, which is always 'azure_ai_search'. Required. AZURE_AI_SEARCH.""" azure_ai_search: "_models.AzureAISearchToolResource" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -1425,16 +1250,16 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class AzureAISearchToolResource(_Model): """A set of index resources used by the ``azure_ai_search`` tool. - :ivar indexes: The indices attached to this agent. There can be a maximum of 1 index - resource attached to the agent. Required. + :ivar indexes: The indices attached to this agent. There can be a maximum of 1 index resource + attached to the agent. Required. :vartype indexes: list[~azure.ai.projects.models.AISearchIndexResource] """ indexes: list["_models.AISearchIndexResource"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The indices attached to this agent. There can be a maximum of 1 index - resource attached to the agent. Required.""" + """The indices attached to this agent. There can be a maximum of 1 index resource attached to the + agent. Required.""" @overload def __init__( @@ -1549,7 +1374,7 @@ class AzureFunctionDefinitionFunction(_Model): :vartype description: str :ivar parameters: The parameters the functions accepts, described as a JSON Schema object. Required. - :vartype parameters: any + :vartype parameters: dict[str, any] """ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -1557,7 +1382,7 @@ class AzureFunctionDefinitionFunction(_Model): description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """A description of what the function does, used by the model to choose when and how to call the function.""" - parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + parameters: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The parameters the functions accepts, described as a JSON Schema object. Required.""" @overload @@ -1565,7 +1390,7 @@ def __init__( self, *, name: str, - parameters: Any, + parameters: dict[str, Any], description: Optional[str] = None, ) -> None: ... @@ -1617,14 +1442,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class AzureFunctionTool(Tool, discriminator="azure_function"): """The input definition information for an Azure Function Tool, as used to configure an Agent. - :ivar type: The object type, which is always 'browser_automation'. Required. + :ivar type: The object type, which is always 'browser_automation'. Required. AZURE_FUNCTION. :vartype type: str or ~azure.ai.projects.models.AZURE_FUNCTION :ivar azure_function: The Azure Function Tool definition. Required. :vartype azure_function: ~azure.ai.projects.models.AzureFunctionDefinition """ type: Literal[ToolType.AZURE_FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'browser_automation'. Required.""" + """The object type, which is always 'browser_automation'. Required. AZURE_FUNCTION.""" azure_function: "_models.AzureFunctionDefinition" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -1779,13 +1604,15 @@ class BingCustomSearchPreviewTool(Tool, discriminator="bing_custom_search_previe """The input definition information for a Bing custom search tool as used to configure an agent. :ivar type: The object type, which is always 'bing_custom_search_preview'. Required. + BING_CUSTOM_SEARCH_PREVIEW. :vartype type: str or ~azure.ai.projects.models.BING_CUSTOM_SEARCH_PREVIEW :ivar bing_custom_search_preview: The bing custom search tool parameters. Required. :vartype bing_custom_search_preview: ~azure.ai.projects.models.BingCustomSearchToolParameters """ type: Literal[ToolType.BING_CUSTOM_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'bing_custom_search_preview'. Required.""" + """The object type, which is always 'bing_custom_search_preview'. Required. + BING_CUSTOM_SEARCH_PREVIEW.""" bing_custom_search_preview: "_models.BingCustomSearchToolParameters" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -1814,16 +1641,15 @@ class BingCustomSearchToolParameters(_Model): """The bing custom search tool parameters. :ivar search_configurations: The project connections attached to this tool. There can be a - maximum of 1 connection - resource attached to the tool. Required. + maximum of 1 connection resource attached to the tool. Required. :vartype search_configurations: list[~azure.ai.projects.models.BingCustomSearchConfiguration] """ search_configurations: list["_models.BingCustomSearchConfiguration"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The project connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool. Required.""" + """The project connections attached to this tool. There can be a maximum of 1 connection resource + attached to the tool. Required.""" @overload def __init__( @@ -1897,8 +1723,7 @@ class BingGroundingSearchToolParameters(_Model): """The bing grounding search tool parameters. :ivar search_configurations: The search configurations attached to this tool. There can be a - maximum of 1 - search configuration resource attached to the tool. Required. + maximum of 1 search configuration resource attached to the tool. Required. :vartype search_configurations: list[~azure.ai.projects.models.BingGroundingSearchConfiguration] """ @@ -1906,8 +1731,8 @@ class BingGroundingSearchToolParameters(_Model): search_configurations: list["_models.BingGroundingSearchConfiguration"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The search configurations attached to this tool. There can be a maximum of 1 - search configuration resource attached to the tool. Required.""" + """The search configurations attached to this tool. There can be a maximum of 1 search + configuration resource attached to the tool. Required.""" @overload def __init__( @@ -1931,14 +1756,14 @@ class BingGroundingTool(Tool, discriminator="bing_grounding"): """The input definition information for a bing grounding search tool as used to configure an agent. - :ivar type: The object type, which is always 'bing_grounding'. Required. + :ivar type: The object type, which is always 'bing_grounding'. Required. BING_GROUNDING. :vartype type: str or ~azure.ai.projects.models.BING_GROUNDING :ivar bing_grounding: The bing grounding search tool parameters. Required. :vartype bing_grounding: ~azure.ai.projects.models.BingGroundingSearchToolParameters """ type: Literal[ToolType.BING_GROUNDING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'bing_grounding'. Required.""" + """The object type, which is always 'bing_grounding'. Required. BING_GROUNDING.""" bing_grounding: "_models.BingGroundingSearchToolParameters" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -2030,13 +1855,15 @@ class BrowserAutomationPreviewTool(Tool, discriminator="browser_automation_previ """The input definition information for a Browser Automation Tool, as used to configure an Agent. :ivar type: The object type, which is always 'browser_automation_preview'. Required. + BROWSER_AUTOMATION_PREVIEW. :vartype type: str or ~azure.ai.projects.models.BROWSER_AUTOMATION_PREVIEW :ivar browser_automation_preview: The Browser Automation Tool parameters. Required. :vartype browser_automation_preview: ~azure.ai.projects.models.BrowserAutomationToolParameters """ type: Literal[ToolType.BROWSER_AUTOMATION_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'browser_automation_preview'. Required.""" + """The object type, which is always 'browser_automation_preview'. Required. + BROWSER_AUTOMATION_PREVIEW.""" browser_automation_preview: "_models.BrowserAutomationToolParameters" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -2125,13 +1952,15 @@ class CaptureStructuredOutputsTool(Tool, discriminator="capture_structured_outpu """A tool for capturing structured outputs. :ivar type: The type of the tool. Always ``capture_structured_outputs``. Required. + CAPTURE_STRUCTURED_OUTPUTS. :vartype type: str or ~azure.ai.projects.models.CAPTURE_STRUCTURED_OUTPUTS :ivar outputs: The structured outputs to capture from the model. Required. :vartype outputs: ~azure.ai.projects.models.StructuredOutputDefinition """ type: Literal[ToolType.CAPTURE_STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the tool. Always ``capture_structured_outputs``. Required.""" + """The type of the tool. Always ``capture_structured_outputs``. Required. + CAPTURE_STRUCTURED_OUTPUTS.""" outputs: "_models.StructuredOutputDefinition" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -2329,7 +2158,7 @@ class ClickParam(ComputerAction, discriminator="click"): """Click. :ivar type: Specifies the event type. For a click action, this property is always ``click``. - Required. + Required. CLICK. :vartype type: str or ~azure.ai.projects.models.CLICK :ivar button: Indicates which mouse button was pressed during the click. One of ``left``, ``right``, ``wheel``, ``back``, or ``forward``. Required. Known values are: "left", "right", @@ -2342,7 +2171,8 @@ class ClickParam(ComputerAction, discriminator="click"): """ type: Literal[ComputerActionType.CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a click action, this property is always ``click``. Required.""" + """Specifies the event type. For a click action, this property is always ``click``. Required. + CLICK.""" button: Union[str, "_models.ClickButtonType"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -2507,10 +2337,10 @@ class EvaluatorDefinition(_Model): :vartype type: str or ~azure.ai.projects.models.EvaluatorDefinitionType :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. This includes parameters like type, properties, required. - :vartype init_parameters: any + :vartype init_parameters: dict[str, any] :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This includes parameters like type, properties, required. - :vartype data_schema: any + :vartype data_schema: dict[str, any] :ivar metrics: List of output metrics produced by this evaluator. :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] """ @@ -2519,10 +2349,10 @@ class EvaluatorDefinition(_Model): type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) """The type of evaluator definition. Required. Known values are: \"prompt\", \"code\", \"prompt_and_code\", \"service\", and \"openai_graders\".""" - init_parameters: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + init_parameters: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The JSON schema (Draft 2020-12) for the evaluator's input parameters. This includes parameters like type, properties, required.""" - data_schema: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + data_schema: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The JSON schema (Draft 2020-12) for the evaluator's input data. This includes parameters like type, properties, required.""" metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = rest_field( @@ -2535,8 +2365,8 @@ def __init__( self, *, type: str, - init_parameters: Optional[Any] = None, - data_schema: Optional[Any] = None, + init_parameters: Optional[dict[str, Any]] = None, + data_schema: Optional[dict[str, Any]] = None, metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, ) -> None: ... @@ -2556,20 +2386,20 @@ class CodeBasedEvaluatorDefinition(EvaluatorDefinition, discriminator="code"): :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. This includes parameters like type, properties, required. - :vartype init_parameters: any + :vartype init_parameters: dict[str, any] :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This includes parameters like type, properties, required. - :vartype data_schema: any + :vartype data_schema: dict[str, any] :ivar metrics: List of output metrics produced by this evaluator. :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] - :ivar type: Required. Code-based definition + :ivar type: Required. Code-based definition. :vartype type: str or ~azure.ai.projects.models.CODE :ivar code_text: Inline code text for the evaluator. Required. :vartype code_text: str """ type: Literal[EvaluatorDefinitionType.CODE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Code-based definition""" + """Required. Code-based definition.""" code_text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Inline code text for the evaluator. Required.""" @@ -2578,8 +2408,8 @@ def __init__( self, *, code_text: str, - init_parameters: Optional[Any] = None, - data_schema: Optional[Any] = None, + init_parameters: Optional[dict[str, Any]] = None, + data_schema: Optional[dict[str, Any]] = None, metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, ) -> None: ... @@ -2705,29 +2535,30 @@ class CodeInterpreterTool(Tool, discriminator="code_interpreter"): """Code interpreter. :ivar type: The type of the code interpreter tool. Always ``code_interpreter``. Required. + CODE_INTERPRETER. :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER :ivar container: The code interpreter container. Can be a container ID or an object that - specifies uploaded file IDs to make available to your code, along with an - optional ``memory_limit`` setting. Required. Is either a str type or a + specifies uploaded file IDs to make available to your code, along with an optional + ``memory_limit`` setting. If not provided, the service assumes auto. Is either a str type or a CodeInterpreterContainerAuto type. :vartype container: str or ~azure.ai.projects.models.CodeInterpreterContainerAuto """ type: Literal[ToolType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the code interpreter tool. Always ``code_interpreter``. Required.""" - container: Union[str, "_models.CodeInterpreterContainerAuto"] = rest_field( + """The type of the code interpreter tool. Always ``code_interpreter``. Required. CODE_INTERPRETER.""" + container: Optional[Union[str, "_models.CodeInterpreterContainerAuto"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The code interpreter container. Can be a container ID or an object that - specifies uploaded file IDs to make available to your code, along with an - optional ``memory_limit`` setting. Required. Is either a str type or a - CodeInterpreterContainerAuto type.""" + """The code interpreter container. Can be a container ID or an object that specifies uploaded file + IDs to make available to your code, along with an optional ``memory_limit`` setting. If not + provided, the service assumes auto. Is either a str type or a CodeInterpreterContainerAuto + type.""" @overload def __init__( self, *, - container: Union[str, "_models.CodeInterpreterContainerAuto"], + container: Optional[Union[str, "_models.CodeInterpreterContainerAuto"]] = None, ) -> None: ... @overload @@ -2887,8 +2718,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class ComputerScreenshotImage(_Model): """A computer screenshot image used with the computer use tool. - :ivar type: Specifies the event type. For a computer screenshot, this property is - always set to ``computer_screenshot``. Required. Default value is "computer_screenshot". + :ivar type: Specifies the event type. For a computer screenshot, this property is always set to + ``computer_screenshot``. Required. Default value is "computer_screenshot". :vartype type: str :ivar image_url: The URL of the screenshot image. :vartype image_url: str @@ -2897,8 +2728,8 @@ class ComputerScreenshotImage(_Model): """ type: Literal["computer_screenshot"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Specifies the event type. For a computer screenshot, this property is - always set to ``computer_screenshot``. Required. Default value is \"computer_screenshot\".""" + """Specifies the event type. For a computer screenshot, this property is always set to + ``computer_screenshot``. Required. Default value is \"computer_screenshot\".""" image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The URL of the screenshot image.""" file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -2928,6 +2759,7 @@ class ComputerUsePreviewTool(Tool, discriminator="computer_use_preview"): """Computer use preview. :ivar type: The type of the computer use tool. Always ``computer_use_preview``. Required. + COMPUTER_USE_PREVIEW. :vartype type: str or ~azure.ai.projects.models.COMPUTER_USE_PREVIEW :ivar environment: The type of computer environment to control. Required. Known values are: "windows", "mac", "linux", "ubuntu", and "browser". @@ -2939,7 +2771,8 @@ class ComputerUsePreviewTool(Tool, discriminator="computer_use_preview"): """ type: Literal[ToolType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the computer use tool. Always ``computer_use_preview``. Required.""" + """The type of the computer use tool. Always ``computer_use_preview``. Required. + COMPUTER_USE_PREVIEW.""" environment: Union[str, "_models.ComputerEnvironment"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -2980,7 +2813,7 @@ class Connection(_Model): :vartype id: str :ivar type: Category of the connection. Required. Known values are: "AzureOpenAI", "AzureBlob", "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", "AppConfig", "AppInsights", - "CustomKeys", and "RemoteTool". + "CustomKeys", and "RemoteTool_Preview". :vartype type: str or ~azure.ai.projects.models.ConnectionType :ivar target: The connection URL to be used for this service. Required. :vartype target: str @@ -3000,7 +2833,7 @@ class Connection(_Model): type: Union[str, "_models.ConnectionType"] = rest_field(visibility=["read"]) """Category of the connection. Required. Known values are: \"AzureOpenAI\", \"AzureBlob\", \"AzureStorageAccount\", \"CognitiveSearch\", \"CosmosDB\", \"ApiKey\", \"AppConfig\", - \"AppInsights\", \"CustomKeys\", and \"RemoteTool\".""" + \"AppInsights\", \"CustomKeys\", and \"RemoteTool_Preview\".""" target: str = rest_field(visibility=["read"]) """The connection URL to be used for this service. Required.""" is_default: bool = rest_field(name="isDefault", visibility=["read"]) @@ -3016,7 +2849,7 @@ class ContainerAppAgentDefinition(AgentDefinition, discriminator="container_app" :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. + :ivar kind: Required. CONTAINER_APP. :vartype kind: str or ~azure.ai.projects.models.CONTAINER_APP :ivar container_protocol_versions: The protocols that the agent supports for ingress communication of the containers. Required. @@ -3031,7 +2864,7 @@ class ContainerAppAgentDefinition(AgentDefinition, discriminator="container_app" """ kind: Literal[AgentKind.CONTAINER_APP] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" + """Required. CONTAINER_APP.""" container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -3070,7 +2903,7 @@ class ContainerFileCitationBody(Annotation, discriminator="container_file_citati """Container file citation. :ivar type: The type of the container file citation. Always ``container_file_citation``. - Required. + Required. CONTAINER_FILE_CITATION. :vartype type: str or ~azure.ai.projects.models.CONTAINER_FILE_CITATION :ivar container_id: The ID of the container file. Required. :vartype container_id: str @@ -3087,7 +2920,8 @@ class ContainerFileCitationBody(Annotation, discriminator="container_file_citati """ type: Literal[AnnotationType.CONTAINER_FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the container file citation. Always ``container_file_citation``. Required.""" + """The type of the container file citation. Always ``container_file_citation``. Required. + CONTAINER_FILE_CITATION.""" container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The ID of the container file. Required.""" file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3209,7 +3043,7 @@ class CosmosDBIndex(Index, discriminator="CosmosDBNoSqlVectorStore"): :vartype description: str :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar type: Type of index. Required. CosmosDB + :ivar type: Type of index. Required. CosmosDB. :vartype type: str or ~azure.ai.projects.models.COSMOS_DB :ivar connection_name: Name of connection to CosmosDB. Required. :vartype connection_name: str @@ -3224,7 +3058,7 @@ class CosmosDBIndex(Index, discriminator="CosmosDBNoSqlVectorStore"): """ type: Literal[IndexType.COSMOS_DB] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Type of index. Required. CosmosDB""" + """Type of index. Required. CosmosDB.""" connection_name: str = rest_field(name="connectionName", visibility=["create"]) """Name of connection to CosmosDB. Required.""" database_name: str = rest_field(name="databaseName", visibility=["create"]) @@ -3263,39 +3097,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = IndexType.COSMOS_DB # type: ignore -class CreatedBy(_Model): - """CreatedBy. - - :ivar agent: The agent that created the item. - :vartype agent: ~azure.ai.projects.models.AgentId - :ivar response_id: The response on which the item is created. - :vartype response_id: str - """ - - agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The agent that created the item.""" - response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response on which the item is created.""" - - @overload - def __init__( - self, - *, - agent: Optional["_models.AgentId"] = None, - response_id: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - class Trigger(_Model): """Base model for Trigger of the schedule. @@ -3380,12 +3181,12 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class CustomCredential(BaseCredentials, discriminator="CustomKeys"): """Custom credential definition. - :ivar type: The credential type. Required. Custom credential + :ivar type: The credential type. Required. Custom credential. :vartype type: str or ~azure.ai.projects.models.CUSTOM """ type: Literal[CredentialType.CUSTOM] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. Custom credential""" + """The credential type. Required. Custom credential.""" @overload def __init__( @@ -3439,7 +3240,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class CustomGrammarFormatParam(CustomToolParamFormat, discriminator="grammar"): """Grammar format. - :ivar type: Grammar format. Always ``grammar``. Required. + :ivar type: Grammar format. Always ``grammar``. Required. GRAMMAR. :vartype type: str or ~azure.ai.projects.models.GRAMMAR :ivar syntax: The syntax of the grammar definition. One of ``lark`` or ``regex``. Required. Known values are: "lark" and "regex". @@ -3449,7 +3250,7 @@ class CustomGrammarFormatParam(CustomToolParamFormat, discriminator="grammar"): """ type: Literal[CustomToolParamFormatType.GRAMMAR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Grammar format. Always ``grammar``. Required.""" + """Grammar format. Always ``grammar``. Required. GRAMMAR.""" syntax: Union[str, "_models.GrammarSyntax1"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -3481,12 +3282,12 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class CustomTextFormatParam(CustomToolParamFormat, discriminator="text"): """Text format. - :ivar type: Unconstrained text format. Always ``text``. Required. + :ivar type: Unconstrained text format. Always ``text``. Required. TEXT. :vartype type: str or ~azure.ai.projects.models.TEXT """ type: Literal[CustomToolParamFormatType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Unconstrained text format. Always ``text``. Required.""" + """Unconstrained text format. Always ``text``. Required. TEXT.""" @overload def __init__( @@ -3508,7 +3309,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class CustomToolParam(Tool, discriminator="custom"): """Custom tool. - :ivar type: The type of the custom tool. Always ``custom``. Required. + :ivar type: The type of the custom tool. Always ``custom``. Required. CUSTOM. :vartype type: str or ~azure.ai.projects.models.CUSTOM :ivar name: The name of the custom tool, used to identify it in tool calls. Required. :vartype name: str @@ -3519,7 +3320,7 @@ class CustomToolParam(Tool, discriminator="custom"): """ type: Literal[ToolType.CUSTOM] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the custom tool. Always ``custom``. Required.""" + """The type of the custom tool. Always ``custom``. Required. CUSTOM.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the custom tool, used to identify it in tool calls. Required.""" description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3725,17 +3526,18 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class DeleteAgentResponse(_Model): """A deleted agent Object. - :ivar object: The object type. Always 'agent.deleted'. Required. Default value is - "agent.deleted". - :vartype object: str + :ivar object: The object type. Always 'agent.deleted'. Required. AGENT_DELETED. + :vartype object: str or ~azure.ai.projects.models.AGENT_DELETED :ivar name: The name of the agent. Required. :vartype name: str :ivar deleted: Whether the agent was successfully deleted. Required. :vartype deleted: bool """ - object: Literal["agent.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type. Always 'agent.deleted'. Required. Default value is \"agent.deleted\".""" + object: Literal[AgentObjectType.AGENT_DELETED] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The object type. Always 'agent.deleted'. Required. AGENT_DELETED.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the agent. Required.""" deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3745,6 +3547,7 @@ class DeleteAgentResponse(_Model): def __init__( self, *, + object: Literal[AgentObjectType.AGENT_DELETED], name: str, deleted: bool, ) -> None: ... @@ -3758,15 +3561,13 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.object: Literal["agent.deleted"] = "agent.deleted" class DeleteAgentVersionResponse(_Model): """A deleted agent version Object. - :ivar object: The object type. Always 'agent.deleted'. Required. Default value is - "agent.version.deleted". - :vartype object: str + :ivar object: The object type. Always 'agent.version.deleted'. Required. AGENT_VERSION_DELETED. + :vartype object: str or ~azure.ai.projects.models.AGENT_VERSION_DELETED :ivar name: The name of the agent. Required. :vartype name: str :ivar version: The version identifier of the agent. Required. @@ -3775,8 +3576,10 @@ class DeleteAgentVersionResponse(_Model): :vartype deleted: bool """ - object: Literal["agent.version.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type. Always 'agent.deleted'. Required. Default value is \"agent.version.deleted\".""" + object: Literal[AgentObjectType.AGENT_VERSION_DELETED] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The object type. Always 'agent.version.deleted'. Required. AGENT_VERSION_DELETED.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the agent. Required.""" version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3788,6 +3591,7 @@ class DeleteAgentVersionResponse(_Model): def __init__( self, *, + object: Literal[AgentObjectType.AGENT_VERSION_DELETED], name: str, version: str, deleted: bool, @@ -3802,24 +3606,23 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.object: Literal["agent.version.deleted"] = "agent.version.deleted" class DeleteMemoryStoreResult(_Model): """DeleteMemoryStoreResult. - :ivar object: The object type. Always 'memory_store.deleted'. Required. Default value is - "memory_store.deleted". - :vartype object: str + :ivar object: The object type. Always 'memory_store.deleted'. Required. MEMORY_STORE_DELETED. + :vartype object: str or ~azure.ai.projects.models.MEMORY_STORE_DELETED :ivar name: The name of the memory store. Required. :vartype name: str :ivar deleted: Whether the memory store was successfully deleted. Required. :vartype deleted: bool """ - object: Literal["memory_store.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type. Always 'memory_store.deleted'. Required. Default value is - \"memory_store.deleted\".""" + object: Literal[MemoryStoreObjectType.MEMORY_STORE_DELETED] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The object type. Always 'memory_store.deleted'. Required. MEMORY_STORE_DELETED.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the memory store. Required.""" deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3829,6 +3632,7 @@ class DeleteMemoryStoreResult(_Model): def __init__( self, *, + object: Literal[MemoryStoreObjectType.MEMORY_STORE_DELETED], name: str, deleted: bool, ) -> None: ... @@ -3842,7 +3646,6 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.object: Literal["memory_store.deleted"] = "memory_store.deleted" class Deployment(_Model): @@ -3885,7 +3688,7 @@ class DoubleClickAction(ComputerAction, discriminator="double_click"): """DoubleClick. :ivar type: Specifies the event type. For a double click action, this property is always set to - ``double_click``. Required. + ``double_click``. Required. DOUBLE_CLICK. :vartype type: str or ~azure.ai.projects.models.DOUBLE_CLICK :ivar x: The x-coordinate where the double click occurred. Required. :vartype x: int @@ -3895,7 +3698,7 @@ class DoubleClickAction(ComputerAction, discriminator="double_click"): type: Literal[ComputerActionType.DOUBLE_CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore """Specifies the event type. For a double click action, this property is always set to - ``double_click``. Required.""" + ``double_click``. Required. DOUBLE_CLICK.""" x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The x-coordinate where the double click occurred. Required.""" y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3924,8 +3727,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class Drag(ComputerAction, discriminator="drag"): """Drag. - :ivar type: Specifies the event type. For a drag action, this property is - always set to ``drag``. Required. + :ivar type: Specifies the event type. For a drag action, this property is always set to + ``drag``. Required. DRAG. :vartype type: str or ~azure.ai.projects.models.DRAG :ivar path: An array of coordinates representing the path of the drag action. Coordinates will appear as an array of objects, eg @@ -3940,8 +3743,8 @@ class Drag(ComputerAction, discriminator="drag"): """ type: Literal[ComputerActionType.DRAG] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a drag action, this property is - always set to ``drag``. Required.""" + """Specifies the event type. For a drag action, this property is always set to ``drag``. Required. + DRAG.""" path: list["_models.DragPoint"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """An array of coordinates representing the path of the drag action. Coordinates will appear as an array of objects, eg @@ -4006,9 +3809,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class InputItem(_Model): - """An item representing part of the context for the response to be - generated by the model. Can contain text, images, and audio inputs, - as well as previous assistant responses and tool call outputs. + """An item representing part of the context for the response to be generated by the model. Can + contain text, images, and audio inputs, as well as previous assistant responses and tool call + outputs. You probably want to use the sub-classes and not this class directly. Known sub-classes are: InputItemApplyPatchToolCallItemParam, InputItemApplyPatchToolCallOutputItemParam, @@ -4063,17 +3866,16 @@ class EasyInputMessage(InputItem, discriminator="message"): """Input message. :ivar role: The role of the message input. One of ``user``, ``assistant``, ``system``, or - ``developer``. Required. Is one of the following types: Literal["user"], - Literal["assistant"], Literal["system"], Literal["developer"] + ``developer``. Required. Is one of the following types: Literal["user"], Literal["assistant"], + Literal["system"], Literal["developer"] :vartype role: str or str or str or str - :ivar content: Text, image, or audio input to the model, used to generate a response. - Can also contain previous assistant responses. Required. Is either a str type or a - [InputContent] type. + :ivar content: Text, image, or audio input to the model, used to generate a response. Can also + contain previous assistant responses. Required. Is either a str type or a [InputContent] type. :vartype content: str or list[~azure.ai.projects.models.InputContent] - :ivar type: The type of the message input. Always ``message``. Required. + :ivar type: The type of the message input. Always ``message``. Required. MESSAGE. :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: + :ivar status: The status of item. One of ``in_progress``, ``completed``, or ``incomplete``. + Populated when items are returned via API. Is one of the following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] :vartype status: str or str or str """ @@ -4081,23 +3883,22 @@ class EasyInputMessage(InputItem, discriminator="message"): role: Literal["user", "assistant", "system", "developer"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The role of the message input. One of ``user``, ``assistant``, ``system``, or - ``developer``. Required. Is one of the following types: Literal[\"user\"], - Literal[\"assistant\"], Literal[\"system\"], Literal[\"developer\"]""" + """The role of the message input. One of ``user``, ``assistant``, ``system``, or ``developer``. + Required. Is one of the following types: Literal[\"user\"], Literal[\"assistant\"], + Literal[\"system\"], Literal[\"developer\"]""" content: Union[str, list["_models.InputContent"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """Text, image, or audio input to the model, used to generate a response. - Can also contain previous assistant responses. Required. Is either a str type or a - [InputContent] type.""" + """Text, image, or audio input to the model, used to generate a response. Can also contain + previous assistant responses. Required. Is either a str type or a [InputContent] type.""" type: Literal[InputItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the message input. Always ``message``. Required.""" + """The type of the message input. Always ``message``. Required. MESSAGE.""" status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The status of item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: - Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + """The status of item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated when + items are returned via API. Is one of the following types: Literal[\"in_progress\"], + Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( @@ -4158,12 +3959,12 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class EntraIDCredentials(BaseCredentials, discriminator="AAD"): """Entra ID credential definition. - :ivar type: The credential type. Required. Entra ID credential (formerly known as AAD) + :ivar type: The credential type. Required. Entra ID credential (formerly known as AAD). :vartype type: str or ~azure.ai.projects.models.ENTRA_ID """ type: Literal[CredentialType.ENTRA_ID] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. Entra ID credential (formerly known as AAD)""" + """The credential type. Required. Entra ID credential (formerly known as AAD).""" @overload def __init__( @@ -4189,9 +3990,9 @@ class Error(_Model): :vartype code: str :ivar message: Required. :vartype message: str - :ivar param: Required. + :ivar param: :vartype param: str - :ivar type: Required. + :ivar type: :vartype type: str :ivar details: :vartype details: list[~azure.ai.projects.models.Error] @@ -4205,10 +4006,8 @@ class Error(_Model): """Required.""" message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Required.""" - param: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" + param: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + type: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) details: Optional[list["_models.Error"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) additional_info: Optional[dict[str, Any]] = rest_field( name="additionalInfo", visibility=["read", "create", "update", "delete", "query"] @@ -4223,8 +4022,8 @@ def __init__( *, code: str, message: str, - param: str, - type: str, + param: Optional[str] = None, + type: Optional[str] = None, details: Optional[list["_models.Error"]] = None, additional_info: Optional[dict[str, Any]] = None, debug_info: Optional[dict[str, Any]] = None, @@ -4241,46 +4040,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class EvalCompareReport(InsightResult, discriminator="EvaluationComparison"): - """Insights from the evaluation comparison. - - :ivar type: The type of insights result. Required. Evaluation Comparison. - :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON - :ivar comparisons: Comparison results for each treatment run against the baseline. Required. - :vartype comparisons: list[~azure.ai.projects.models.EvalRunResultComparison] - :ivar method: The statistical method used for comparison. Required. - :vartype method: str - """ - - type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights result. Required. Evaluation Comparison.""" - comparisons: list["_models.EvalRunResultComparison"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Comparison results for each treatment run against the baseline. Required.""" - method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The statistical method used for comparison. Required.""" - - @overload - def __init__( - self, - *, - comparisons: list["_models.EvalRunResultComparison"], - method: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.EVALUATION_COMPARISON # type: ignore - - class EvalResult(_Model): """Result of the evaluation. @@ -4479,7 +4238,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class EvaluationComparisonRequest(InsightRequest, discriminator="EvaluationComparison"): +class EvaluationComparisonInsightRequest(InsightRequest, discriminator="EvaluationComparison"): """Evaluation Comparison Request. :ivar type: The type of request. Required. Evaluation Comparison. @@ -4524,6 +4283,46 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = InsightType.EVALUATION_COMPARISON # type: ignore +class EvaluationComparisonInsightResult(InsightResult, discriminator="EvaluationComparison"): + """Insights from the evaluation comparison. + + :ivar type: The type of insights result. Required. Evaluation Comparison. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON + :ivar comparisons: Comparison results for each treatment run against the baseline. Required. + :vartype comparisons: list[~azure.ai.projects.models.EvalRunResultComparison] + :ivar method: The statistical method used for comparison. Required. + :vartype method: str + """ + + type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights result. Required. Evaluation Comparison.""" + comparisons: list["_models.EvalRunResultComparison"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Comparison results for each treatment run against the baseline. Required.""" + method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The statistical method used for comparison. Required.""" + + @overload + def __init__( + self, + *, + comparisons: list["_models.EvalRunResultComparison"], + method: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.EVALUATION_COMPARISON # type: ignore + + class InsightSample(_Model): """A sample from the analysis. @@ -4714,27 +4513,37 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class EvaluationRunClusterInsightResult(InsightResult, discriminator="EvaluationRunClusterInsight"): - """Insights from the evaluation run cluster analysis. +class EvaluationRunClusterInsightRequest(InsightRequest, discriminator="EvaluationRunClusterInsight"): + """Insights on set of Evaluation Results. - :ivar type: The type of insights result. Required. Insights on an Evaluation run result. + :ivar type: The type of insights request. Required. Insights on an Evaluation run result. :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT - :ivar cluster_insight: Required. - :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult + :ivar eval_id: Evaluation Id for the insights. Required. + :vartype eval_id: str + :ivar run_ids: List of evaluation run IDs for the insights. Required. + :vartype run_ids: list[str] + :ivar model_configuration: Configuration of the model used in the insight generation. + :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration """ type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights result. Required. Insights on an Evaluation run result.""" - cluster_insight: "_models.ClusterInsightResult" = rest_field( - name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] + """The type of insights request. Required. Insights on an Evaluation run result.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Evaluation Id for the insights. Required.""" + run_ids: list[str] = rest_field(name="runIds", visibility=["read", "create", "update", "delete", "query"]) + """List of evaluation run IDs for the insights. Required.""" + model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( + name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] ) - """Required.""" + """Configuration of the model used in the insight generation.""" @overload def __init__( self, *, - cluster_insight: "_models.ClusterInsightResult", + eval_id: str, + run_ids: list[str], + model_configuration: Optional["_models.InsightModelConfiguration"] = None, ) -> None: ... @overload @@ -4749,37 +4558,27 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore -class EvaluationRunClusterInsightsRequest(InsightRequest, discriminator="EvaluationRunClusterInsight"): - """Insights on set of Evaluation Results. +class EvaluationRunClusterInsightResult(InsightResult, discriminator="EvaluationRunClusterInsight"): + """Insights from the evaluation run cluster analysis. - :ivar type: The type of insights request. Required. Insights on an Evaluation run result. + :ivar type: The type of insights result. Required. Insights on an Evaluation run result. :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT - :ivar eval_id: Evaluation Id for the insights. Required. - :vartype eval_id: str - :ivar run_ids: List of evaluation run IDs for the insights. Required. - :vartype run_ids: list[str] - :ivar model_configuration: Configuration of the model used in the insight generation. - :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration + :ivar cluster_insight: Required. + :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult """ type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights request. Required. Insights on an Evaluation run result.""" - eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) - """Evaluation Id for the insights. Required.""" - run_ids: list[str] = rest_field(name="runIds", visibility=["read", "create", "update", "delete", "query"]) - """List of evaluation run IDs for the insights. Required.""" - model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( - name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] + """The type of insights result. Required. Insights on an Evaluation run result.""" + cluster_insight: "_models.ClusterInsightResult" = rest_field( + name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] ) - """Configuration of the model used in the insight generation.""" + """Required.""" @overload def __init__( self, *, - eval_id: str, - run_ids: list[str], - model_configuration: Optional["_models.InsightModelConfiguration"] = None, + cluster_insight: "_models.ClusterInsightResult", ) -> None: ... @overload @@ -5008,9 +4807,9 @@ class EvaluatorVersion(_Model): :ivar created_by: Creator of the evaluator. Required. :vartype created_by: str :ivar created_at: Creation date/time of the evaluator. Required. - :vartype created_at: int + :vartype created_at: str :ivar modified_at: Last modified date/time of the evaluator. Required. - :vartype modified_at: int + :vartype modified_at: str :ivar id: Asset ID, a unique identifier for the asset. :vartype id: str :ivar name: The name of the resource. Required. @@ -5038,9 +4837,9 @@ class EvaluatorVersion(_Model): """Definition of the evaluator. Required.""" created_by: str = rest_field(visibility=["read"]) """Creator of the evaluator. Required.""" - created_at: int = rest_field(visibility=["read"]) + created_at: str = rest_field(visibility=["read"]) """Creation date/time of the evaluator. Required.""" - modified_at: int = rest_field(visibility=["read"]) + modified_at: str = rest_field(visibility=["read"]) """Last modified date/time of the evaluator. Required.""" id: Optional[str] = rest_field(visibility=["read"]) """Asset ID, a unique identifier for the asset.""" @@ -5081,16 +4880,15 @@ class FabricDataAgentToolParameters(_Model): """The fabric data agent tool parameters. :ivar project_connections: The project connections attached to this tool. There can be a - maximum of 1 connection - resource attached to the tool. + maximum of 1 connection resource attached to the tool. :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] """ project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The project connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool.""" + """The project connections attached to this tool. There can be a maximum of 1 connection resource + attached to the tool.""" @overload def __init__( @@ -5166,7 +4964,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class FileCitationBody(Annotation, discriminator="file_citation"): """File citation. - :ivar type: The type of the file citation. Always ``file_citation``. Required. + :ivar type: The type of the file citation. Always ``file_citation``. Required. FILE_CITATION. :vartype type: str or ~azure.ai.projects.models.FILE_CITATION :ivar file_id: The ID of the file. Required. :vartype file_id: str @@ -5177,7 +4975,7 @@ class FileCitationBody(Annotation, discriminator="file_citation"): """ type: Literal[AnnotationType.FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file citation. Always ``file_citation``. Required.""" + """The type of the file citation. Always ``file_citation``. Required. FILE_CITATION.""" file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The ID of the file. Required.""" index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -5261,7 +5059,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class FilePath(Annotation, discriminator="file_path"): """File path. - :ivar type: The type of the file path. Always ``file_path``. Required. + :ivar type: The type of the file path. Always ``file_path``. Required. FILE_PATH. :vartype type: str or ~azure.ai.projects.models.FILE_PATH :ivar file_id: The ID of the file. Required. :vartype file_id: str @@ -5270,7 +5068,7 @@ class FilePath(Annotation, discriminator="file_path"): """ type: Literal[AnnotationType.FILE_PATH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file path. Always ``file_path``. Required.""" + """The type of the file path. Always ``file_path``. Required. FILE_PATH.""" file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The ID of the file. Required.""" index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -5299,7 +5097,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class FileSearchTool(Tool, discriminator="file_search"): """File search. - :ivar type: The type of the file search tool. Always ``file_search``. Required. + :ivar type: The type of the file search tool. Always ``file_search``. Required. FILE_SEARCH. :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH :ivar vector_store_ids: The IDs of the vector stores to search. Required. :vartype vector_store_ids: list[str] @@ -5314,7 +5112,7 @@ class FileSearchTool(Tool, discriminator="file_search"): """ type: Literal[ToolType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file search tool. Always ``file_search``. Required.""" + """The type of the file search tool. Always ``file_search``. Required. FILE_SEARCH.""" vector_store_ids: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The IDs of the vector stores to search. Required.""" max_num_results: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -5323,9 +5121,7 @@ class FileSearchTool(Tool, discriminator="file_search"): visibility=["read", "create", "update", "delete", "query"] ) """Ranking options for search.""" - filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) + filters: Optional["_types.Filters"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Is either a ComparisonFilter type or a CompoundFilter type.""" @overload @@ -5335,7 +5131,7 @@ def __init__( vector_store_ids: list[str], max_num_results: Optional[int] = None, ranking_options: Optional["_models.RankingOptions"] = None, - filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = None, + filters: Optional["_types.Filters"] = None, ) -> None: ... @overload @@ -5486,7 +5282,7 @@ class FunctionAndCustomToolCallOutputInputFileContent( ): # pylint: disable=name-too-long """Input file. - :ivar type: The type of the input item. Always ``input_file``. Required. + :ivar type: The type of the input item. Always ``input_file``. Required. INPUT_FILE. :vartype type: str or ~azure.ai.projects.models.INPUT_FILE :ivar file_id: :vartype file_id: str @@ -5499,7 +5295,7 @@ class FunctionAndCustomToolCallOutputInputFileContent( """ type: Literal[FunctionAndCustomToolCallOutputType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_file``. Required.""" + """The type of the input item. Always ``input_file``. Required. INPUT_FILE.""" file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the file to be sent to the model.""" @@ -5535,7 +5331,7 @@ class FunctionAndCustomToolCallOutputInputImageContent( ): # pylint: disable=name-too-long """Input image. - :ivar type: The type of the input item. Always ``input_image``. Required. + :ivar type: The type of the input item. Always ``input_image``. Required. INPUT_IMAGE. :vartype type: str or ~azure.ai.projects.models.INPUT_IMAGE :ivar image_url: :vartype image_url: str @@ -5547,7 +5343,7 @@ class FunctionAndCustomToolCallOutputInputImageContent( """ type: Literal[FunctionAndCustomToolCallOutputType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_image``. Required.""" + """The type of the input item. Always ``input_image``. Required. INPUT_IMAGE.""" image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) detail: Union[str, "_models.ImageDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -5580,14 +5376,14 @@ class FunctionAndCustomToolCallOutputInputTextContent( ): # pylint: disable=name-too-long """Input text. - :ivar type: The type of the input item. Always ``input_text``. Required. + :ivar type: The type of the input item. Always ``input_text``. Required. INPUT_TEXT. :vartype type: str or ~azure.ai.projects.models.INPUT_TEXT :ivar text: The text input to the model. Required. :vartype text: str """ type: Literal[FunctionAndCustomToolCallOutputType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_text``. Required.""" + """The type of the input item. Always ``input_text``. Required. INPUT_TEXT.""" text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The text input to the model. Required.""" @@ -5610,44 +5406,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = FunctionAndCustomToolCallOutputType.INPUT_TEXT # type: ignore -class FunctionShellAction(_Model): - """Shell exec action. - - :ivar commands: Required. - :vartype commands: list[str] - :ivar timeout_ms: Required. - :vartype timeout_ms: int - :ivar max_output_length: Required. - :vartype max_output_length: int - """ - - commands: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - timeout_ms: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - max_output_length: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - commands: list[str], - timeout_ms: int, - max_output_length: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - class FunctionShellActionParam(_Model): """Shell action. @@ -5684,33 +5442,25 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class FunctionShellCallOutputContent(_Model): - """Shell call output content. +class FunctionShellCallOutputContentParam(_Model): + """Shell output content. - :ivar stdout: The standard output that was captured. Required. + :ivar stdout: Captured stdout output for the shell call. Required. :vartype stdout: str - :ivar stderr: The standard error output that was captured. Required. + :ivar stderr: Captured stderr output for the shell call. Required. :vartype stderr: str - :ivar outcome: Represents either an exit outcome (with an exit code) or a timeout outcome for a - shell call output chunk. Required. - :vartype outcome: ~azure.ai.projects.models.FunctionShellCallOutputOutcome - :ivar created_by: The identifier of the actor that created the item. - :vartype created_by: str + :ivar outcome: The exit or timeout outcome associated with this shell call. Required. + :vartype outcome: ~azure.ai.projects.models.FunctionShellCallOutputOutcomeParam """ stdout: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The standard output that was captured. Required.""" + """Captured stdout output for the shell call. Required.""" stderr: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The standard error output that was captured. Required.""" - outcome: "_models.FunctionShellCallOutputOutcome" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Represents either an exit outcome (with an exit code) or a timeout outcome for a shell call - output chunk. Required.""" - created_by: Optional[str] = rest_field( # pyright: ignore[reportIncompatibleVariableOverride] + """Captured stderr output for the shell call. Required.""" + outcome: "_models.FunctionShellCallOutputOutcomeParam" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The identifier of the actor that created the item.""" + """The exit or timeout outcome associated with this shell call. Required.""" @overload def __init__( @@ -5718,8 +5468,7 @@ def __init__( *, stdout: str, stderr: str, - outcome: "_models.FunctionShellCallOutputOutcome", - created_by: Optional[str] = None, + outcome: "_models.FunctionShellCallOutputOutcomeParam", ) -> None: ... @overload @@ -5733,113 +5482,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class FunctionShellCallOutputContentParam(_Model): - """Shell output content. - - :ivar stdout: Captured stdout output for the shell call. Required. - :vartype stdout: str - :ivar stderr: Captured stderr output for the shell call. Required. - :vartype stderr: str - :ivar outcome: The exit or timeout outcome associated with this shell call. Required. - :vartype outcome: ~azure.ai.projects.models.FunctionShellCallOutputOutcomeParam - """ - - stdout: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Captured stdout output for the shell call. Required.""" - stderr: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Captured stderr output for the shell call. Required.""" - outcome: "_models.FunctionShellCallOutputOutcomeParam" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The exit or timeout outcome associated with this shell call. Required.""" - - @overload - def __init__( - self, - *, - stdout: str, - stderr: str, - outcome: "_models.FunctionShellCallOutputOutcomeParam", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class FunctionShellCallOutputOutcome(_Model): - """Shell call outcome. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - FunctionShellCallOutputExitOutcome, FunctionShellCallOutputTimeoutOutcome - - :ivar type: Required. Known values are: "timeout" and "exit". - :vartype type: str or ~azure.ai.projects.models.FunctionShellCallOutputOutcomeType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"timeout\" and \"exit\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class FunctionShellCallOutputExitOutcome(FunctionShellCallOutputOutcome, discriminator="exit"): - """Shell call exit outcome. - - :ivar type: The outcome type. Always ``exit``. Required. - :vartype type: str or ~azure.ai.projects.models.EXIT - :ivar exit_code: Exit code from the shell process. Required. - :vartype exit_code: int - """ - - type: Literal[FunctionShellCallOutputOutcomeType.EXIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The outcome type. Always ``exit``. Required.""" - exit_code: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Exit code from the shell process. Required.""" - - @overload - def __init__( - self, - *, - exit_code: int, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = FunctionShellCallOutputOutcomeType.EXIT # type: ignore - - -class FunctionShellCallOutputOutcomeParam(_Model): - """Shell call outcome. +class FunctionShellCallOutputOutcomeParam(_Model): + """Shell call outcome. You probably want to use the sub-classes and not this class directly. Known sub-classes are: FunctionShellCallOutputExitOutcomeParam, FunctionShellCallOutputTimeoutOutcomeParam @@ -5873,14 +5517,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class FunctionShellCallOutputExitOutcomeParam(FunctionShellCallOutputOutcomeParam, discriminator="exit"): """Shell call exit outcome. - :ivar type: The outcome type. Always ``exit``. Required. + :ivar type: The outcome type. Always ``exit``. Required. EXIT. :vartype type: str or ~azure.ai.projects.models.EXIT :ivar exit_code: The exit code returned by the shell process. Required. :vartype exit_code: int """ type: Literal[FunctionShellCallOutputOutcomeParamType.EXIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The outcome type. Always ``exit``. Required.""" + """The outcome type. Always ``exit``. Required. EXIT.""" exit_code: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The exit code returned by the shell process. Required.""" @@ -5903,44 +5547,17 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = FunctionShellCallOutputOutcomeParamType.EXIT # type: ignore -class FunctionShellCallOutputTimeoutOutcome(FunctionShellCallOutputOutcome, discriminator="timeout"): - """Shell call timeout outcome. - - :ivar type: The outcome type. Always ``timeout``. Required. - :vartype type: str or ~azure.ai.projects.models.TIMEOUT - """ - - type: Literal[FunctionShellCallOutputOutcomeType.TIMEOUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The outcome type. Always ``timeout``. Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = FunctionShellCallOutputOutcomeType.TIMEOUT # type: ignore - - class FunctionShellCallOutputTimeoutOutcomeParam( FunctionShellCallOutputOutcomeParam, discriminator="timeout" ): # pylint: disable=name-too-long """Shell call timeout outcome. - :ivar type: The outcome type. Always ``timeout``. Required. + :ivar type: The outcome type. Always ``timeout``. Required. TIMEOUT. :vartype type: str or ~azure.ai.projects.models.TIMEOUT """ type: Literal[FunctionShellCallOutputOutcomeParamType.TIMEOUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The outcome type. Always ``timeout``. Required.""" + """The outcome type. Always ``timeout``. Required. TIMEOUT.""" @overload def __init__( @@ -5962,12 +5579,12 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class FunctionShellToolParam(Tool, discriminator="shell"): """Shell tool. - :ivar type: The type of the shell tool. Always ``shell``. Required. + :ivar type: The type of the shell tool. Always ``shell``. Required. SHELL. :vartype type: str or ~azure.ai.projects.models.SHELL """ type: Literal[ToolType.SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the shell tool. Always ``shell``. Required.""" + """The type of the shell tool. Always ``shell``. Required. SHELL.""" @overload def __init__( @@ -5989,7 +5606,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class FunctionTool(Tool, discriminator="function"): """Function. - :ivar type: The type of the function tool. Always ``function``. Required. + :ivar type: The type of the function tool. Always ``function``. Required. FUNCTION. :vartype type: str or ~azure.ai.projects.models.FUNCTION :ivar name: The name of the function to call. Required. :vartype name: str @@ -6002,7 +5619,7 @@ class FunctionTool(Tool, discriminator="function"): """ type: Literal[ToolType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the function tool. Always ``function``. Required.""" + """The type of the function tool. Always ``function``. Required. FUNCTION.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the function to call. Required.""" description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -6036,16 +5653,12 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class HostedAgentDefinition(AgentDefinition, discriminator="hosted"): """The hosted agent definition. - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ImageBasedHostedAgentDefinition - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. + :ivar kind: Required. HOSTED. :vartype kind: str or ~azure.ai.projects.models.HOSTED :ivar tools: An array of tools the hosted agent's model may call while generating a response. - You - can specify which tool to use by setting the ``tool_choice`` parameter. + You can specify which tool to use by setting the ``tool_choice`` parameter. :vartype tools: list[~azure.ai.projects.models.Tool] :ivar container_protocol_versions: The protocols that the agent supports for ingress communication of the containers. Required. @@ -6056,14 +5669,15 @@ class HostedAgentDefinition(AgentDefinition, discriminator="hosted"): :vartype memory: str :ivar environment_variables: Environment variables to set in the hosted agent container. :vartype environment_variables: dict[str, str] + :ivar image: The image ID for the agent, applicable to image-based hosted agents. + :vartype image: str """ - __mapping__: dict[str, _Model] = {} kind: Literal[AgentKind.HOSTED] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" + """Required. HOSTED.""" tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of tools the hosted agent's model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter.""" + """An array of tools the hosted agent's model may call while generating a response. You can + specify which tool to use by setting the ``tool_choice`` parameter.""" container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -6076,6 +5690,8 @@ class HostedAgentDefinition(AgentDefinition, discriminator="hosted"): visibility=["read", "create", "update", "delete", "query"] ) """Environment variables to set in the hosted agent container.""" + image: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The image ID for the agent, applicable to image-based hosted agents.""" @overload def __init__( @@ -6087,6 +5703,7 @@ def __init__( rai_config: Optional["_models.RaiConfig"] = None, tools: Optional[list["_models.Tool"]] = None, environment_variables: Optional[dict[str, str]] = None, + image: Optional[str] = None, ) -> None: ... @overload @@ -6194,68 +5811,18 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ImageBasedHostedAgentDefinition(HostedAgentDefinition, discriminator="hosted"): - """The image-based deployment definition for a hosted agent. - - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar tools: An array of tools the hosted agent's model may call while generating a response. - You - can specify which tool to use by setting the ``tool_choice`` parameter. - :vartype tools: list[~azure.ai.projects.models.Tool] - :ivar container_protocol_versions: The protocols that the agent supports for ingress - communication of the containers. Required. - :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] - :ivar cpu: The CPU configuration for the hosted agent. Required. - :vartype cpu: str - :ivar memory: The memory configuration for the hosted agent. Required. - :vartype memory: str - :ivar environment_variables: Environment variables to set in the hosted agent container. - :vartype environment_variables: dict[str, str] - :ivar kind: Required. - :vartype kind: str or ~azure.ai.projects.models.HOSTED - :ivar image: The image for the hosted agent. Required. - :vartype image: str - """ - - image: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The image for the hosted agent. Required.""" - - @overload - def __init__( - self, - *, - container_protocol_versions: list["_models.ProtocolVersionRecord"], - cpu: str, - memory: str, - image: str, - rai_config: Optional["_models.RaiConfig"] = None, - tools: Optional[list["_models.Tool"]] = None, - environment_variables: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - class ImageGenTool(Tool, discriminator="image_generation"): """Image generation tool. :ivar type: The type of the image generation tool. Always ``image_generation``. Required. + IMAGE_GENERATION. :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION :ivar model: Is one of the following types: Literal["gpt-image-1"], Literal["gpt-image-1-mini"], str :vartype model: str or str or str - :ivar quality: The quality of the generated image. One of ``low``, ``medium``, ``high``, - or ``auto``. Default: ``auto``. Is one of the following types: Literal["low"], - Literal["medium"], Literal["high"], Literal["auto"] + :ivar quality: The quality of the generated image. One of ``low``, ``medium``, ``high``, or + ``auto``. Default: ``auto``. Is one of the following types: Literal["low"], Literal["medium"], + Literal["high"], Literal["auto"] :vartype quality: str or str or str or str :ivar size: The size of the generated image. One of ``1024x1024``, ``1024x1536``, ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: @@ -6270,14 +5837,14 @@ class ImageGenTool(Tool, discriminator="image_generation"): :ivar moderation: Moderation level for the generated image. Default: ``auto``. Is either a Literal["auto"] type or a Literal["low"] type. :vartype moderation: str or str - :ivar background: Background type for the generated image. One of ``transparent``, - ``opaque``, or ``auto``. Default: ``auto``. Is one of the following types: - Literal["transparent"], Literal["opaque"], Literal["auto"] + :ivar background: Background type for the generated image. One of ``transparent``, ``opaque``, + or ``auto``. Default: ``auto``. Is one of the following types: Literal["transparent"], + Literal["opaque"], Literal["auto"] :vartype background: str or str or str :ivar input_fidelity: Known values are: "high" and "low". :vartype input_fidelity: str or ~azure.ai.projects.models.InputFidelity - :ivar input_image_mask: Optional mask for inpainting. Contains ``image_url`` - (string, optional) and ``file_id`` (string, optional). + :ivar input_image_mask: Optional mask for inpainting. Contains ``image_url`` (string, optional) + and ``file_id`` (string, optional). :vartype input_image_mask: ~azure.ai.projects.models.ImageGenToolInputImageMask :ivar partial_images: Number of partial images to generate in streaming mode, from 0 (default value) to 3. @@ -6285,7 +5852,7 @@ class ImageGenTool(Tool, discriminator="image_generation"): """ type: Literal[ToolType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the image generation tool. Always ``image_generation``. Required.""" + """The type of the image generation tool. Always ``image_generation``. Required. IMAGE_GENERATION.""" model: Optional[Union[Literal["gpt-image-1"], Literal["gpt-image-1-mini"], str]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -6293,21 +5860,20 @@ class ImageGenTool(Tool, discriminator="image_generation"): quality: Optional[Literal["low", "medium", "high", "auto"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The quality of the generated image. One of ``low``, ``medium``, ``high``, - or ``auto``. Default: ``auto``. Is one of the following types: Literal[\"low\"], - Literal[\"medium\"], Literal[\"high\"], Literal[\"auto\"]""" + """The quality of the generated image. One of ``low``, ``medium``, ``high``, or ``auto``. Default: + ``auto``. Is one of the following types: Literal[\"low\"], Literal[\"medium\"], + Literal[\"high\"], Literal[\"auto\"]""" size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The size of the generated image. One of ``1024x1024``, ``1024x1536``, - ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: - Literal[\"1024x1024\"], Literal[\"1024x1536\"], Literal[\"1536x1024\"], Literal[\"auto\"]""" + """The size of the generated image. One of ``1024x1024``, ``1024x1536``, ``1536x1024``, or + ``auto``. Default: ``auto``. Is one of the following types: Literal[\"1024x1024\"], + Literal[\"1024x1536\"], Literal[\"1536x1024\"], Literal[\"auto\"]""" output_format: Optional[Literal["png", "webp", "jpeg"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The output format of the generated image. One of ``png``, ``webp``, or - ``jpeg``. Default: ``png``. Is one of the following types: Literal[\"png\"], - Literal[\"webp\"], Literal[\"jpeg\"]""" + """The output format of the generated image. One of ``png``, ``webp``, or ``jpeg``. Default: + ``png``. Is one of the following types: Literal[\"png\"], Literal[\"webp\"], Literal[\"jpeg\"]""" output_compression: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Compression level for the output image. Default: 100.""" moderation: Optional[Literal["auto", "low"]] = rest_field( @@ -6318,9 +5884,9 @@ class ImageGenTool(Tool, discriminator="image_generation"): background: Optional[Literal["transparent", "opaque", "auto"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """Background type for the generated image. One of ``transparent``, - ``opaque``, or ``auto``. Default: ``auto``. Is one of the following types: - Literal[\"transparent\"], Literal[\"opaque\"], Literal[\"auto\"]""" + """Background type for the generated image. One of ``transparent``, ``opaque``, or ``auto``. + Default: ``auto``. Is one of the following types: Literal[\"transparent\"], + Literal[\"opaque\"], Literal[\"auto\"]""" input_fidelity: Optional[Union[str, "_models.InputFidelity"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -6328,8 +5894,8 @@ class ImageGenTool(Tool, discriminator="image_generation"): input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """Optional mask for inpainting. Contains ``image_url`` - (string, optional) and ``file_id`` (string, optional).""" + """Optional mask for inpainting. Contains ``image_url`` (string, optional) and ``file_id`` + (string, optional).""" partial_images: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Number of partial images to generate in streaming mode, from 0 (default value) to 3.""" @@ -6427,7 +5993,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class InputContentInputFileContent(InputContent, discriminator="input_file"): """Input file. - :ivar type: The type of the input item. Always ``input_file``. Required. + :ivar type: The type of the input item. Always ``input_file``. Required. INPUT_FILE. :vartype type: str or ~azure.ai.projects.models.INPUT_FILE :ivar file_id: :vartype file_id: str @@ -6440,7 +6006,7 @@ class InputContentInputFileContent(InputContent, discriminator="input_file"): """ type: Literal[InputContentType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_file``. Required.""" + """The type of the input item. Always ``input_file``. Required. INPUT_FILE.""" file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the file to be sent to the model.""" @@ -6474,7 +6040,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class InputContentInputImageContent(InputContent, discriminator="input_image"): """Input image. - :ivar type: The type of the input item. Always ``input_image``. Required. + :ivar type: The type of the input item. Always ``input_image``. Required. INPUT_IMAGE. :vartype type: str or ~azure.ai.projects.models.INPUT_IMAGE :ivar image_url: :vartype image_url: str @@ -6486,7 +6052,7 @@ class InputContentInputImageContent(InputContent, discriminator="input_image"): """ type: Literal[InputContentType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_image``. Required.""" + """The type of the input item. Always ``input_image``. Required. INPUT_IMAGE.""" image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) detail: Union[str, "_models.ImageDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -6517,14 +6083,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class InputContentInputTextContent(InputContent, discriminator="input_text"): """Input text. - :ivar type: The type of the input item. Always ``input_text``. Required. + :ivar type: The type of the input item. Always ``input_text``. Required. INPUT_TEXT. :vartype type: str or ~azure.ai.projects.models.INPUT_TEXT :ivar text: The text input to the model. Required. :vartype text: str """ type: Literal[InputContentType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_text``. Required.""" + """The type of the input item. Always ``input_text``. Required. INPUT_TEXT.""" text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The text input to the model. Required.""" @@ -6639,7 +6205,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class InputItemApplyPatchToolCallItemParam(InputItem, discriminator="apply_patch_call"): """Apply patch tool call. - :ivar type: The type of the item. Always ``apply_patch_call``. Required. + :ivar type: The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL. :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH_CALL :ivar id: :vartype id: str @@ -6654,7 +6220,7 @@ class InputItemApplyPatchToolCallItemParam(InputItem, discriminator="apply_patch """ type: Literal[InputItemType.APPLY_PATCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``apply_patch_call``. Required.""" + """The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL.""" id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the apply patch tool call generated by the model. Required.""" @@ -6696,6 +6262,7 @@ class InputItemApplyPatchToolCallOutputItemParam( """Apply patch tool call output. :ivar type: The type of the item. Always ``apply_patch_call_output``. Required. + APPLY_PATCH_CALL_OUTPUT. :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH_CALL_OUTPUT :ivar id: :vartype id: str @@ -6709,7 +6276,7 @@ class InputItemApplyPatchToolCallOutputItemParam( """ type: Literal[InputItemType.APPLY_PATCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``apply_patch_call_output``. Required.""" + """The type of the item. Always ``apply_patch_call_output``. Required. APPLY_PATCH_CALL_OUTPUT.""" id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the apply patch tool call generated by the model. Required.""" @@ -6746,7 +6313,7 @@ class InputItemCodeInterpreterToolCall(InputItem, discriminator="code_interprete """Code interpreter tool call. :ivar type: The type of the code interpreter tool call. Always ``code_interpreter_call``. - Required. + Required. CODE_INTERPRETER_CALL. :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL :ivar id: The unique ID of the code interpreter tool call. Required. :vartype id: str @@ -6765,7 +6332,8 @@ class InputItemCodeInterpreterToolCall(InputItem, discriminator="code_interprete """ type: Literal[InputItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required.""" + """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required. + CODE_INTERPRETER_CALL.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the code interpreter tool call. Required.""" status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field( @@ -6812,7 +6380,7 @@ class InputItemCompactionSummaryItemParam(InputItem, discriminator="compaction") :ivar id: :vartype id: str - :ivar type: The type of the item. Always ``compaction``. Required. + :ivar type: The type of the item. Always ``compaction``. Required. COMPACTION. :vartype type: str or ~azure.ai.projects.models.COMPACTION :ivar encrypted_content: The encrypted content of the compaction summary. Required. :vartype encrypted_content: str @@ -6820,7 +6388,7 @@ class InputItemCompactionSummaryItemParam(InputItem, discriminator="compaction") id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) type: Literal[InputItemType.COMPACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``compaction``. Required.""" + """The type of the item. Always ``compaction``. Required. COMPACTION.""" encrypted_content: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The encrypted content of the compaction summary. Required.""" @@ -6852,7 +6420,7 @@ class InputItemComputerCallOutputItemParam(InputItem, discriminator="computer_ca :ivar call_id: The ID of the computer tool call that produced the output. Required. :vartype call_id: str :ivar type: The type of the computer tool call output. Always ``computer_call_output``. - Required. + Required. COMPUTER_CALL_OUTPUT. :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT :ivar output: Required. :vartype output: ~azure.ai.projects.models.ComputerScreenshotImage @@ -6867,7 +6435,8 @@ class InputItemComputerCallOutputItemParam(InputItem, discriminator="computer_ca call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The ID of the computer tool call that produced the output. Required.""" type: Literal[InputItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the computer tool call output. Always ``computer_call_output``. Required.""" + """The type of the computer tool call output. Always ``computer_call_output``. Required. + COMPUTER_CALL_OUTPUT.""" output: "_models.ComputerScreenshotImage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Required.""" acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = rest_field( @@ -6904,7 +6473,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class InputItemComputerToolCall(InputItem, discriminator="computer_call"): """Computer tool call. - :ivar type: The type of the computer call. Always ``computer_call``. Required. + :ivar type: The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL. :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL :ivar id: The unique ID of the computer call. Required. :vartype id: str @@ -6914,14 +6483,14 @@ class InputItemComputerToolCall(InputItem, discriminator="computer_call"): :vartype action: ~azure.ai.projects.models.ComputerAction :ivar pending_safety_checks: The pending safety checks for the computer call. Required. :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerCallSafetyCheckParam] - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. + Populated when items are returned via API. Required. Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["incomplete"] :vartype status: str or str or str """ type: Literal[InputItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the computer call. Always ``computer_call``. Required.""" + """The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the computer call. Required.""" call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -6935,9 +6504,9 @@ class InputItemComputerToolCall(InputItem, discriminator="computer_call"): status: Literal["in_progress", "completed", "incomplete"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated + when items are returned via API. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( @@ -6966,6 +6535,7 @@ class InputItemCustomToolCall(InputItem, discriminator="custom_tool_call"): """Custom tool call. :ivar type: The type of the custom tool call. Always ``custom_tool_call``. Required. + CUSTOM_TOOL_CALL. :vartype type: str or ~azure.ai.projects.models.CUSTOM_TOOL_CALL :ivar id: The unique ID of the custom tool call in the OpenAI platform. :vartype id: str @@ -6978,7 +6548,7 @@ class InputItemCustomToolCall(InputItem, discriminator="custom_tool_call"): """ type: Literal[InputItemType.CUSTOM_TOOL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the custom tool call. Always ``custom_tool_call``. Required.""" + """The type of the custom tool call. Always ``custom_tool_call``. Required. CUSTOM_TOOL_CALL.""" id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the custom tool call in the OpenAI platform.""" call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -7014,21 +6584,22 @@ class InputItemCustomToolCallOutput(InputItem, discriminator="custom_tool_call_o """Custom tool call output. :ivar type: The type of the custom tool call output. Always ``custom_tool_call_output``. - Required. + Required. CUSTOM_TOOL_CALL_OUTPUT. :vartype type: str or ~azure.ai.projects.models.CUSTOM_TOOL_CALL_OUTPUT :ivar id: The unique ID of the custom tool call output in the OpenAI platform. :vartype id: str :ivar call_id: The call ID, used to map this custom tool call output to a custom tool call. Required. :vartype call_id: str - :ivar output: The output from the custom tool call generated by your code. - Can be a string or an list of output content. Required. Is either a str type or a + :ivar output: The output from the custom tool call generated by your code. Can be a string or + an list of output content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type. :vartype output: str or list[~azure.ai.projects.models.FunctionAndCustomToolCallOutput] """ type: Literal[InputItemType.CUSTOM_TOOL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the custom tool call output. Always ``custom_tool_call_output``. Required.""" + """The type of the custom tool call output. Always ``custom_tool_call_output``. Required. + CUSTOM_TOOL_CALL_OUTPUT.""" id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the custom tool call output in the OpenAI platform.""" call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -7036,9 +6607,8 @@ class InputItemCustomToolCallOutput(InputItem, discriminator="custom_tool_call_o output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The output from the custom tool call generated by your code. - Can be a string or an list of output content. Required. Is either a str type or a - [FunctionAndCustomToolCallOutput] type.""" + """The output from the custom tool call generated by your code. Can be a string or an list of + output content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type.""" @overload def __init__( @@ -7067,11 +6637,11 @@ class InputItemFileSearchToolCall(InputItem, discriminator="file_search_call"): :ivar id: The unique ID of the file search tool call. Required. :vartype id: str :ivar type: The type of the file search tool call. Always ``file_search_call``. Required. + FILE_SEARCH_CALL. :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL - :ivar status: The status of the file search tool call. One of ``in_progress``, - ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: - Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"], - Literal["failed"] + :ivar status: The status of the file search tool call. One of ``in_progress``, ``searching``, + ``incomplete`` or ``failed``,. Required. Is one of the following types: Literal["in_progress"], + Literal["searching"], Literal["completed"], Literal["incomplete"], Literal["failed"] :vartype status: str or str or str or str or str :ivar queries: The queries used to search for files. Required. :vartype queries: list[str] @@ -7082,14 +6652,13 @@ class InputItemFileSearchToolCall(InputItem, discriminator="file_search_call"): id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the file search tool call. Required.""" type: Literal[InputItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file search tool call. Always ``file_search_call``. Required.""" + """The type of the file search tool call. Always ``file_search_call``. Required. FILE_SEARCH_CALL.""" status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The status of the file search tool call. One of ``in_progress``, - ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: - Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], - Literal[\"incomplete\"], Literal[\"failed\"]""" + """The status of the file search tool call. One of ``in_progress``, ``searching``, ``incomplete`` + or ``failed``,. Required. Is one of the following types: Literal[\"in_progress\"], + Literal[\"searching\"], Literal[\"completed\"], Literal[\"incomplete\"], Literal[\"failed\"]""" queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The queries used to search for files. Required.""" results: Optional[list["_models.FileSearchToolCallResults"]] = rest_field( @@ -7126,7 +6695,7 @@ class InputItemFunctionCallOutputItemParam(InputItem, discriminator="function_ca :ivar call_id: The unique ID of the function tool call generated by the model. Required. :vartype call_id: str :ivar type: The type of the function tool call output. Always ``function_call_output``. - Required. + Required. FUNCTION_CALL_OUTPUT. :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT :ivar output: Text, image, or file output of the function tool call. Required. Is either a str type or a [Union["_models.InputTextContentParam", "_models.InputImageContentParamAutoParam", @@ -7142,7 +6711,8 @@ class InputItemFunctionCallOutputItemParam(InputItem, discriminator="function_ca call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the function tool call generated by the model. Required.""" type: Literal[InputItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the function tool call output. Always ``function_call_output``. Required.""" + """The type of the function tool call output. Always ``function_call_output``. Required. + FUNCTION_CALL_OUTPUT.""" output: Union[ str, list[ @@ -7199,7 +6769,7 @@ class InputItemFunctionShellCallItemParam(InputItem, discriminator="shell_call") :vartype id: str :ivar call_id: The unique ID of the shell tool call generated by the model. Required. :vartype call_id: str - :ivar type: The type of the item. Always ``shell_call``. Required. + :ivar type: The type of the item. Always ``shell_call``. Required. SHELL_CALL. :vartype type: str or ~azure.ai.projects.models.SHELL_CALL :ivar action: The shell commands and limits that describe how to run the tool call. Required. :vartype action: ~azure.ai.projects.models.FunctionShellActionParam @@ -7211,7 +6781,7 @@ class InputItemFunctionShellCallItemParam(InputItem, discriminator="shell_call") call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the shell tool call generated by the model. Required.""" type: Literal[InputItemType.SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``shell_call``. Required.""" + """The type of the item. Always ``shell_call``. Required. SHELL_CALL.""" action: "_models.FunctionShellActionParam" = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The shell commands and limits that describe how to run the tool call. Required.""" status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = rest_field( @@ -7250,7 +6820,7 @@ class InputItemFunctionShellCallOutputItemParam( :vartype id: str :ivar call_id: The unique ID of the shell tool call generated by the model. Required. :vartype call_id: str - :ivar type: The type of the item. Always ``shell_call_output``. Required. + :ivar type: The type of the item. Always ``shell_call_output``. Required. SHELL_CALL_OUTPUT. :vartype type: str or ~azure.ai.projects.models.SHELL_CALL_OUTPUT :ivar output: Captured chunks of stdout and stderr output, along with their associated outcomes. Required. @@ -7263,7 +6833,7 @@ class InputItemFunctionShellCallOutputItemParam( call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the shell tool call generated by the model. Required.""" type: Literal[InputItemType.SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``shell_call_output``. Required.""" + """The type of the item. Always ``shell_call_output``. Required. SHELL_CALL_OUTPUT.""" output: list["_models.FunctionShellCallOutputContentParam"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -7298,6 +6868,7 @@ class InputItemFunctionToolCall(InputItem, discriminator="function_call"): :ivar id: The unique ID of the function tool call. :vartype id: str :ivar type: The type of the function tool call. Always ``function_call``. Required. + FUNCTION_CALL. :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL :ivar call_id: The unique ID of the function tool call generated by the model. Required. :vartype call_id: str @@ -7305,8 +6876,8 @@ class InputItemFunctionToolCall(InputItem, discriminator="function_call"): :vartype name: str :ivar arguments: A JSON string of the arguments to pass to the function. Required. :vartype arguments: str - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. + Populated when items are returned via API. Is one of the following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] :vartype status: str or str or str """ @@ -7314,7 +6885,7 @@ class InputItemFunctionToolCall(InputItem, discriminator="function_call"): id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the function tool call.""" type: Literal[InputItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the function tool call. Always ``function_call``. Required.""" + """The type of the function tool call. Always ``function_call``. Required. FUNCTION_CALL.""" call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the function tool call generated by the model. Required.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -7324,9 +6895,9 @@ class InputItemFunctionToolCall(InputItem, discriminator="function_call"): status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: - Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated + when items are returned via API. Is one of the following types: Literal[\"in_progress\"], + Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( @@ -7355,6 +6926,7 @@ class InputItemImageGenToolCall(InputItem, discriminator="image_generation_call" """Image generation call. :ivar type: The type of the image generation call. Always ``image_generation_call``. Required. + IMAGE_GENERATION_CALL. :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL :ivar id: The unique ID of the image generation call. Required. :vartype id: str @@ -7366,7 +6938,8 @@ class InputItemImageGenToolCall(InputItem, discriminator="image_generation_call" """ type: Literal[InputItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the image generation call. Always ``image_generation_call``. Required.""" + """The type of the image generation call. Always ``image_generation_call``. Required. + IMAGE_GENERATION_CALL.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the image generation call. Required.""" status: Literal["in_progress", "completed", "generating", "failed"] = rest_field( @@ -7402,6 +6975,7 @@ class InputItemLocalShellToolCall(InputItem, discriminator="local_shell_call"): """Local shell call. :ivar type: The type of the local shell call. Always ``local_shell_call``. Required. + LOCAL_SHELL_CALL. :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL :ivar id: The unique ID of the local shell call. Required. :vartype id: str @@ -7415,7 +6989,7 @@ class InputItemLocalShellToolCall(InputItem, discriminator="local_shell_call"): """ type: Literal[InputItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the local shell call. Always ``local_shell_call``. Required.""" + """The type of the local shell call. Always ``local_shell_call``. Required. LOCAL_SHELL_CALL.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the local shell call. Required.""" call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -7454,7 +7028,7 @@ class InputItemLocalShellToolCallOutput(InputItem, discriminator="local_shell_ca """Local shell call output. :ivar type: The type of the local shell tool call output. Always ``local_shell_call_output``. - Required. + Required. LOCAL_SHELL_CALL_OUTPUT. :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT :ivar id: The unique ID of the local shell tool call generated by the model. Required. :vartype id: str @@ -7466,7 +7040,8 @@ class InputItemLocalShellToolCallOutput(InputItem, discriminator="local_shell_ca """ type: Literal[InputItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the local shell tool call output. Always ``local_shell_call_output``. Required.""" + """The type of the local shell tool call output. Always ``local_shell_call_output``. Required. + LOCAL_SHELL_CALL_OUTPUT.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the local shell tool call generated by the model. Required.""" output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -7502,6 +7077,7 @@ class InputItemMcpApprovalRequest(InputItem, discriminator="mcp_approval_request """MCP approval request. :ivar type: The type of the item. Always ``mcp_approval_request``. Required. + MCP_APPROVAL_REQUEST. :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST :ivar id: The unique ID of the approval request. Required. :vartype id: str @@ -7514,7 +7090,7 @@ class InputItemMcpApprovalRequest(InputItem, discriminator="mcp_approval_request """ type: Literal[InputItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``mcp_approval_request``. Required.""" + """The type of the item. Always ``mcp_approval_request``. Required. MCP_APPROVAL_REQUEST.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the approval request. Required.""" server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -7550,6 +7126,7 @@ class InputItemMcpApprovalResponse(InputItem, discriminator="mcp_approval_respon """MCP approval response. :ivar type: The type of the item. Always ``mcp_approval_response``. Required. + MCP_APPROVAL_RESPONSE. :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE :ivar id: :vartype id: str @@ -7562,7 +7139,7 @@ class InputItemMcpApprovalResponse(InputItem, discriminator="mcp_approval_respon """ type: Literal[InputItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``mcp_approval_response``. Required.""" + """The type of the item. Always ``mcp_approval_response``. Required. MCP_APPROVAL_RESPONSE.""" id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The ID of the approval request being answered. Required.""" @@ -7595,7 +7172,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class InputItemMcpListTools(InputItem, discriminator="mcp_list_tools"): """MCP list tools. - :ivar type: The type of the item. Always ``mcp_list_tools``. Required. + :ivar type: The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS. :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS :ivar id: The unique ID of the list. Required. :vartype id: str @@ -7608,7 +7185,7 @@ class InputItemMcpListTools(InputItem, discriminator="mcp_list_tools"): """ type: Literal[InputItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``mcp_list_tools``. Required.""" + """The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the list. Required.""" server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -7642,7 +7219,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class InputItemMcpToolCall(InputItem, discriminator="mcp_call"): """MCP tool call. - :ivar type: The type of the item. Always ``mcp_call``. Required. + :ivar type: The type of the item. Always ``mcp_call``. Required. MCP_CALL. :vartype type: str or ~azure.ai.projects.models.MCP_CALL :ivar id: The unique ID of the tool call. Required. :vartype id: str @@ -7665,7 +7242,7 @@ class InputItemMcpToolCall(InputItem, discriminator="mcp_call"): """ type: Literal[InputItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``mcp_call``. Required.""" + """The type of the item. Always ``mcp_call``. Required. MCP_CALL.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the tool call. Required.""" server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -7715,7 +7292,7 @@ class InputItemOutputMessage(InputItem, discriminator="output_message"): :ivar id: The unique ID of the output message. Required. :vartype id: str - :ivar type: The type of the output message. Always ``message``. Required. + :ivar type: The type of the output message. Always ``message``. Required. OUTPUT_MESSAGE. :vartype type: str or ~azure.ai.projects.models.OUTPUT_MESSAGE :ivar role: The role of the output message. Always ``assistant``. Required. Default value is "assistant". @@ -7731,7 +7308,7 @@ class InputItemOutputMessage(InputItem, discriminator="output_message"): id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the output message. Required.""" type: Literal[InputItemType.OUTPUT_MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output message. Always ``message``. Required.""" + """The type of the output message. Always ``message``. Required. OUTPUT_MESSAGE.""" role: Literal["assistant"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The role of the output message. Always ``assistant``. Required. Default value is \"assistant\".""" content: list["_models.OutputMessageContent"] = rest_field( @@ -7741,9 +7318,9 @@ class InputItemOutputMessage(InputItem, discriminator="output_message"): status: Literal["in_progress", "completed", "incomplete"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The status of the message input. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when input items are returned via API. Required. Is one of the - following types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + """The status of the message input. One of ``in_progress``, ``completed``, or ``incomplete``. + Populated when input items are returned via API. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( @@ -7770,7 +7347,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class InputItemReasoningItem(InputItem, discriminator="reasoning"): """Reasoning. - :ivar type: The type of the object. Always ``reasoning``. Required. + :ivar type: The type of the object. Always ``reasoning``. Required. REASONING. :vartype type: str or ~azure.ai.projects.models.REASONING :ivar id: The unique identifier of the reasoning content. Required. :vartype id: str @@ -7780,14 +7357,14 @@ class InputItemReasoningItem(InputItem, discriminator="reasoning"): :vartype summary: list[~azure.ai.projects.models.Summary] :ivar content: Reasoning text content. :vartype content: list[~azure.ai.projects.models.ReasoningTextContent] - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. + Populated when items are returned via API. Is one of the following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] :vartype status: str or str or str """ type: Literal[InputItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the object. Always ``reasoning``. Required.""" + """The type of the object. Always ``reasoning``. Required. REASONING.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique identifier of the reasoning content. Required.""" encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -7800,9 +7377,9 @@ class InputItemReasoningItem(InputItem, discriminator="reasoning"): status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: - Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated + when items are returned via API. Is one of the following types: Literal[\"in_progress\"], + Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( @@ -7833,13 +7410,14 @@ class InputItemWebSearchToolCall(InputItem, discriminator="web_search_call"): :ivar id: The unique ID of the web search tool call. Required. :vartype id: str :ivar type: The type of the web search tool call. Always ``web_search_call``. Required. + WEB_SEARCH_CALL. :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL :ivar status: The status of the web search tool call. Required. Is one of the following types: Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"] :vartype status: str or str or str or str - :ivar action: An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required. Is one of - the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind + :ivar action: An object describing the specific action taken in this web search call. Includes + details on how the model used the web (search, open_page, find). Required. Is one of the + following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind :vartype action: ~azure.ai.projects.models.WebSearchActionSearch or ~azure.ai.projects.models.WebSearchActionOpenPage or ~azure.ai.projects.models.WebSearchActionFind @@ -7848,7 +7426,7 @@ class InputItemWebSearchToolCall(InputItem, discriminator="web_search_call"): id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique ID of the web search tool call. Required.""" type: Literal[InputItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the web search tool call. Always ``web_search_call``. Required.""" + """The type of the web search tool call. Always ``web_search_call``. Required. WEB_SEARCH_CALL.""" status: Literal["in_progress", "searching", "completed", "failed"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -7857,9 +7435,9 @@ class InputItemWebSearchToolCall(InputItem, discriminator="web_search_call"): action: Union["_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"] = ( rest_field(visibility=["read", "create", "update", "delete", "query"]) ) - """An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required. Is one of - the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind""" + """An object describing the specific action taken in this web search call. Includes details on how + the model used the web (search, open_page, find). Required. Is one of the following types: + WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind""" @overload def __init__( @@ -7884,130 +7462,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = InputItemType.WEB_SEARCH_CALL # type: ignore -class ItemResource(_Model): - """Content item used to generate a response. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ItemResourceApplyPatchToolCall, ItemResourceApplyPatchToolCallOutput, - ItemResourceCodeInterpreterToolCall, ItemResourceComputerToolCall, - ItemResourceComputerToolCallOutputResource, ItemResourceFileSearchToolCall, - ItemResourceFunctionToolCallResource, ItemResourceFunctionToolCallOutputResource, - ItemResourceImageGenToolCall, ItemResourceLocalShellToolCall, - ItemResourceLocalShellToolCallOutput, ItemResourceMcpApprovalRequest, - ItemResourceMcpApprovalResponseResource, ItemResourceMcpToolCall, ItemResourceMcpListTools, - MemorySearchToolCallItemResource, InputMessageResource, OAuthConsentRequestItemResource, - ItemResourceOutputMessage, ItemResourceFunctionShellCall, ItemResourceFunctionShellCallOutput, - StructuredOutputsItemResource, ItemResourceWebSearchToolCall, WorkflowActionOutputItemResource - - :ivar type: Required. Known values are: "message", "output_message", "file_search_call", - "computer_call", "computer_call_output", "web_search_call", "function_call", - "function_call_output", "image_generation_call", "code_interpreter_call", "local_shell_call", - "local_shell_call_output", "shell_call", "shell_call_output", "apply_patch_call", - "apply_patch_call_output", "mcp_list_tools", "mcp_approval_request", "mcp_approval_response", - "mcp_call", "structured_outputs", "workflow_action", "memory_search_call", and - "oauth_consent_request". - :vartype type: str or ~azure.ai.projects.models.ItemResourceType - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"message\", \"output_message\", \"file_search_call\", - \"computer_call\", \"computer_call_output\", \"web_search_call\", \"function_call\", - \"function_call_output\", \"image_generation_call\", \"code_interpreter_call\", - \"local_shell_call\", \"local_shell_call_output\", \"shell_call\", \"shell_call_output\", - \"apply_patch_call\", \"apply_patch_call_output\", \"mcp_list_tools\", - \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"structured_outputs\", - \"workflow_action\", \"memory_search_call\", and \"oauth_consent_request\".""" - created_by: Optional[Union["_models.CreatedBy", str]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The information about the creator of the item. Is either a CreatedBy type or a str type.""" - - @overload - def __init__( - self, - *, - type: str, - created_by: Optional[Union["_models.CreatedBy", str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class InputMessageResource(ItemResource, discriminator="message"): - """InputMessageResource. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the message input. Always set to ``message``. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role of the message input. One of ``user``, ``system``, or ``developer``. - Required. Is one of the following types: Literal["user"], Literal["system"], - Literal["developer"] - :vartype role: str or str or str - :ivar status: The status of item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: - Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar content: Required. - :vartype content: list[~azure.ai.projects.models.InputContent] - :ivar id: The unique ID of the message input. Required. - :vartype id: str - """ - - type: Literal[ItemResourceType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the message input. Always set to ``message``. Required.""" - role: Literal["user", "system", "developer"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The role of the message input. One of ``user``, ``system``, or ``developer``. Required. Is one - of the following types: Literal[\"user\"], Literal[\"system\"], Literal[\"developer\"]""" - status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: - Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - content: list["_models.InputContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the message input. Required.""" - - @overload - def __init__( - self, - *, - role: Literal["user", "system", "developer"], - content: list["_models.InputContent"], - id: str, # pylint: disable=redefined-builtin - created_by: Optional[Union["_models.CreatedBy", str]] = None, - status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.MESSAGE # type: ignore - - class InputTextContentParam(_Model): """Input text. @@ -8294,1142 +7748,21 @@ class InsightSummary(_Model): unique_cluster_count: int = rest_field( name="uniqueClusterCount", visibility=["read", "create", "update", "delete", "query"] ) - """Total number of unique clusters. Required.""" - method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Method used for clustering. Required.""" - usage: "_models.ClusterTokenUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Token usage while performing clustering analysis. Required.""" - - @overload - def __init__( - self, - *, - sample_count: int, - unique_subcluster_count: int, - unique_cluster_count: int, - method: str, - usage: "_models.ClusterTokenUsage", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ItemReferenceParam(InputItem, discriminator="item_reference"): - """Item reference. - - :ivar type: The type of item to reference. Always ``item_reference``. Required. - :vartype type: str or ~azure.ai.projects.models.ITEM_REFERENCE - :ivar id: The ID of the item to reference. Required. - :vartype id: str - """ - - type: Literal[InputItemType.ITEM_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of item to reference. Always ``item_reference``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item to reference. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InputItemType.ITEM_REFERENCE # type: ignore - - -class ItemResourceApplyPatchToolCall(ItemResource, discriminator="apply_patch_call"): - """Apply patch tool call. - - :ivar type: The type of the item. Always ``apply_patch_call``. Required. - :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH_CALL - :ivar id: The unique ID of the apply patch tool call. Populated when this item is returned via - API. Required. - :vartype id: str - :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required. - :vartype call_id: str - :ivar status: The status of the apply patch tool call. One of ``in_progress`` or ``completed``. - Required. Known values are: "in_progress" and "completed". - :vartype status: str or ~azure.ai.projects.models.ApplyPatchCallStatus - :ivar operation: One of the create_file, delete_file, or update_file operations applied via - apply_patch. Required. - :vartype operation: ~azure.ai.projects.models.ApplyPatchFileOperation - :ivar created_by: The ID of the entity that created this tool call. - :vartype created_by: str - """ - - type: Literal[ItemResourceType.APPLY_PATCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``apply_patch_call``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the apply patch tool call. Populated when this item is returned via API. - Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the apply patch tool call generated by the model. Required.""" - status: Union[str, "_models.ApplyPatchCallStatus"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the apply patch tool call. One of ``in_progress`` or ``completed``. Required. - Known values are: \"in_progress\" and \"completed\".""" - operation: "_models.ApplyPatchFileOperation" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """One of the create_file, delete_file, or update_file operations applied via apply_patch. - Required.""" - created_by: Optional[str] = rest_field( # pyright: ignore[reportIncompatibleVariableOverride] - visibility=["read", "create", "update", "delete", "query"] - ) - """The ID of the entity that created this tool call.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - call_id: str, - status: Union[str, "_models.ApplyPatchCallStatus"], - operation: "_models.ApplyPatchFileOperation", - created_by: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.APPLY_PATCH_CALL # type: ignore - - -class ItemResourceApplyPatchToolCallOutput(ItemResource, discriminator="apply_patch_call_output"): - """Apply patch tool call output. - - :ivar type: The type of the item. Always ``apply_patch_call_output``. Required. - :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH_CALL_OUTPUT - :ivar id: The unique ID of the apply patch tool call output. Populated when this item is - returned via API. Required. - :vartype id: str - :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required. - :vartype call_id: str - :ivar status: The status of the apply patch tool call output. One of ``completed`` or - ``failed``. Required. Known values are: "completed" and "failed". - :vartype status: str or ~azure.ai.projects.models.ApplyPatchCallOutputStatus - :ivar output: - :vartype output: str - :ivar created_by: The ID of the entity that created this tool call output. - :vartype created_by: str - """ - - type: Literal[ItemResourceType.APPLY_PATCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``apply_patch_call_output``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the apply patch tool call output. Populated when this item is returned via - API. Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the apply patch tool call generated by the model. Required.""" - status: Union[str, "_models.ApplyPatchCallOutputStatus"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the apply patch tool call output. One of ``completed`` or ``failed``. Required. - Known values are: \"completed\" and \"failed\".""" - output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - created_by: Optional[str] = rest_field( # pyright: ignore[reportIncompatibleVariableOverride] - visibility=["read", "create", "update", "delete", "query"] - ) - """The ID of the entity that created this tool call output.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - call_id: str, - status: Union[str, "_models.ApplyPatchCallOutputStatus"], - output: Optional[str] = None, - created_by: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.APPLY_PATCH_CALL_OUTPUT # type: ignore - - -class ItemResourceCodeInterpreterToolCall(ItemResource, discriminator="code_interpreter_call"): - """Code interpreter tool call. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the code interpreter tool call. Always ``code_interpreter_call``. - Required. - :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL - :ivar id: The unique ID of the code interpreter tool call. Required. - :vartype id: str - :ivar status: The status of the code interpreter tool call. Valid values are ``in_progress``, - ``completed``, ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the - following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"], - Literal["interpreting"], Literal["failed"] - :vartype status: str or str or str or str or str - :ivar container_id: The ID of the container used to run the code. Required. - :vartype container_id: str - :ivar code: Required. - :vartype code: str - :ivar outputs: Required. - :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutputLogs or - ~azure.ai.projects.models.CodeInterpreterOutputImage] - """ - - type: Literal[ItemResourceType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the code interpreter tool call. Required.""" - status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the code interpreter tool call. Valid values are ``in_progress``, ``completed``, - ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the following types: - Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"], - Literal[\"interpreting\"], Literal[\"failed\"]""" - container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the container used to run the code. Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"], - container_id: str, - code: str, - outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]], - created_by: Optional[Union["_models.CreatedBy", str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.CODE_INTERPRETER_CALL # type: ignore - - -class ItemResourceComputerToolCall(ItemResource, discriminator="computer_call"): - """Computer tool call. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the computer call. Always ``computer_call``. Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL - :ivar id: The unique ID of the computer call. Required. - :vartype id: str - :ivar call_id: An identifier used when responding to the tool call with output. Required. - :vartype call_id: str - :ivar action: Required. - :vartype action: ~azure.ai.projects.models.ComputerAction - :ivar pending_safety_checks: The pending safety checks for the computer call. Required. - :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerCallSafetyCheckParam] - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - """ - - type: Literal[ItemResourceType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the computer call. Always ``computer_call``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the computer call. Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An identifier used when responding to the tool call with output. Required.""" - action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The pending safety checks for the computer call. Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - call_id: str, - action: "_models.ComputerAction", - pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"], - status: Literal["in_progress", "completed", "incomplete"], - created_by: Optional[Union["_models.CreatedBy", str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.COMPUTER_CALL # type: ignore - - -class ItemResourceComputerToolCallOutputResource( - ItemResource, discriminator="computer_call_output" -): # pylint: disable=name-too-long - """ItemResourceComputerToolCallOutputResource. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the computer tool call output. Always ``computer_call_output``. - Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT - :ivar id: The ID of the computer tool call output. - :vartype id: str - :ivar call_id: The ID of the computer tool call that produced the output. Required. - :vartype call_id: str - :ivar acknowledged_safety_checks: The safety checks reported by the API that have been - acknowledged by the developer. - :vartype acknowledged_safety_checks: - list[~azure.ai.projects.models.ComputerCallSafetyCheckParam] - :ivar output: Required. - :vartype output: ~azure.ai.projects.models.ComputerScreenshotImage - :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when input items are returned via API. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - """ - - type: Literal[ItemResourceType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the computer tool call output. Always ``computer_call_output``. Required.""" - id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the computer tool call output.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the computer tool call that produced the output. Required.""" - acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The safety checks reported by the API that have been acknowledged by the - developer.""" - output: "_models.ComputerScreenshotImage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the message input. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when input items are returned via API. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - - @overload - def __init__( - self, - *, - call_id: str, - output: "_models.ComputerScreenshotImage", - created_by: Optional[Union["_models.CreatedBy", str]] = None, - id: Optional[str] = None, # pylint: disable=redefined-builtin - acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = None, - status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.COMPUTER_CALL_OUTPUT # type: ignore - - -class ItemResourceFileSearchToolCall(ItemResource, discriminator="file_search_call"): - """File search tool call. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar id: The unique ID of the file search tool call. Required. - :vartype id: str - :ivar type: The type of the file search tool call. Always ``file_search_call``. Required. - :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL - :ivar status: The status of the file search tool call. One of ``in_progress``, - ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: - Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"], - Literal["failed"] - :vartype status: str or str or str or str or str - :ivar queries: The queries used to search for files. Required. - :vartype queries: list[str] - :ivar results: - :vartype results: list[~azure.ai.projects.models.FileSearchToolCallResults] - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the file search tool call. Required.""" - type: Literal[ItemResourceType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file search tool call. Always ``file_search_call``. Required.""" - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the file search tool call. One of ``in_progress``, - ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: - Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], - Literal[\"incomplete\"], Literal[\"failed\"]""" - queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The queries used to search for files. Required.""" - results: Optional[list["_models.FileSearchToolCallResults"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], - queries: list[str], - created_by: Optional[Union["_models.CreatedBy", str]] = None, - results: Optional[list["_models.FileSearchToolCallResults"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.FILE_SEARCH_CALL # type: ignore - - -class ItemResourceFunctionShellCall(ItemResource, discriminator="shell_call"): - """Shell tool call. - - :ivar type: The type of the item. Always ``shell_call``. Required. - :vartype type: str or ~azure.ai.projects.models.SHELL_CALL - :ivar id: The unique ID of the shell tool call. Populated when this item is returned via API. - Required. - :vartype id: str - :ivar call_id: The unique ID of the shell tool call generated by the model. Required. - :vartype call_id: str - :ivar action: The shell commands and limits that describe how to run the tool call. Required. - :vartype action: ~azure.ai.projects.models.FunctionShellAction - :ivar status: The status of the shell call. One of ``in_progress``, ``completed``, or - ``incomplete``. Required. Known values are: "in_progress", "completed", and "incomplete". - :vartype status: str or ~azure.ai.projects.models.LocalShellCallStatus - :ivar created_by: The ID of the entity that created this tool call. - :vartype created_by: str - """ - - type: Literal[ItemResourceType.SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``shell_call``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the shell tool call. Populated when this item is returned via API. Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the shell tool call generated by the model. Required.""" - action: "_models.FunctionShellAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The shell commands and limits that describe how to run the tool call. Required.""" - status: Union[str, "_models.LocalShellCallStatus"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the shell call. One of ``in_progress``, ``completed``, or ``incomplete``. - Required. Known values are: \"in_progress\", \"completed\", and \"incomplete\".""" - created_by: Optional[str] = rest_field( # pyright: ignore[reportIncompatibleVariableOverride] - visibility=["read", "create", "update", "delete", "query"] - ) - """The ID of the entity that created this tool call.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - call_id: str, - action: "_models.FunctionShellAction", - status: Union[str, "_models.LocalShellCallStatus"], - created_by: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.SHELL_CALL # type: ignore - - -class ItemResourceFunctionShellCallOutput(ItemResource, discriminator="shell_call_output"): - """Shell call output. - - :ivar type: The type of the shell call output. Always ``shell_call_output``. Required. - :vartype type: str or ~azure.ai.projects.models.SHELL_CALL_OUTPUT - :ivar id: The unique ID of the shell call output. Populated when this item is returned via API. - Required. - :vartype id: str - :ivar call_id: The unique ID of the shell tool call generated by the model. Required. - :vartype call_id: str - :ivar output: An array of shell call output contents. Required. - :vartype output: list[~azure.ai.projects.models.FunctionShellCallOutputContent] - :ivar max_output_length: Required. - :vartype max_output_length: int - :ivar created_by: The identifier of the actor that created the item. - :vartype created_by: str - """ - - type: Literal[ItemResourceType.SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the shell call output. Always ``shell_call_output``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the shell call output. Populated when this item is returned via API. Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the shell tool call generated by the model. Required.""" - output: list["_models.FunctionShellCallOutputContent"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """An array of shell call output contents. Required.""" - max_output_length: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - created_by: Optional[str] = rest_field( # pyright: ignore[reportIncompatibleVariableOverride] - visibility=["read", "create", "update", "delete", "query"] - ) - """The identifier of the actor that created the item.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - call_id: str, - output: list["_models.FunctionShellCallOutputContent"], - max_output_length: int, - created_by: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.SHELL_CALL_OUTPUT # type: ignore - - -class ItemResourceFunctionToolCallOutputResource( - ItemResource, discriminator="function_call_output" -): # pylint: disable=name-too-long - """ItemResourceFunctionToolCallOutputResource. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar id: The unique ID of the function tool call output. Populated when this item - is returned via API. - :vartype id: str - :ivar type: The type of the function tool call output. Always ``function_call_output``. - Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT - :ivar call_id: The unique ID of the function tool call generated by the model. Required. - :vartype call_id: str - :ivar output: The output from the function call generated by your code. - Can be a string or an list of output content. Required. Is either a str type or a - [FunctionAndCustomToolCallOutput] type. - :vartype output: str or list[~azure.ai.projects.models.FunctionAndCustomToolCallOutput] - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: - Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - """ - - id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call output. Populated when this item - is returned via API.""" - type: Literal[ItemResourceType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the function tool call output. Always ``function_call_output``. Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call generated by the model. Required.""" - output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The output from the function call generated by your code. - Can be a string or an list of output content. Required. Is either a str type or a - [FunctionAndCustomToolCallOutput] type.""" - status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: - Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - - @overload - def __init__( - self, - *, - call_id: str, - output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]], - created_by: Optional[Union["_models.CreatedBy", str]] = None, - id: Optional[str] = None, # pylint: disable=redefined-builtin - status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.FUNCTION_CALL_OUTPUT # type: ignore - - -class ItemResourceFunctionToolCallResource(ItemResource, discriminator="function_call"): - """ItemResourceFunctionToolCallResource. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar id: The unique ID of the function tool call. - :vartype id: str - :ivar type: The type of the function tool call. Always ``function_call``. Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL - :ivar call_id: The unique ID of the function tool call generated by the model. Required. - :vartype call_id: str - :ivar name: The name of the function to run. Required. - :vartype name: str - :ivar arguments: A JSON string of the arguments to pass to the function. Required. - :vartype arguments: str - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: - Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - """ - - id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call.""" - type: Literal[ItemResourceType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the function tool call. Always ``function_call``. Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call generated by the model. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the arguments to pass to the function. Required.""" - status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Is one of the following types: - Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - - @overload - def __init__( - self, - *, - call_id: str, - name: str, - arguments: str, - created_by: Optional[Union["_models.CreatedBy", str]] = None, - id: Optional[str] = None, # pylint: disable=redefined-builtin - status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.FUNCTION_CALL # type: ignore - - -class ItemResourceImageGenToolCall(ItemResource, discriminator="image_generation_call"): - """Image generation call. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the image generation call. Always ``image_generation_call``. Required. - :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL - :ivar id: The unique ID of the image generation call. Required. - :vartype id: str - :ivar status: The status of the image generation call. Required. Is one of the following types: - Literal["in_progress"], Literal["completed"], Literal["generating"], Literal["failed"] - :vartype status: str or str or str or str - :ivar result: Required. - :vartype result: str - """ - - type: Literal[ItemResourceType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the image generation call. Always ``image_generation_call``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the image generation call. Required.""" - status: Literal["in_progress", "completed", "generating", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the image generation call. Required. Is one of the following types: - Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"generating\"], Literal[\"failed\"]""" - result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "generating", "failed"], - result: str, - created_by: Optional[Union["_models.CreatedBy", str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.IMAGE_GENERATION_CALL # type: ignore - - -class ItemResourceLocalShellToolCall(ItemResource, discriminator="local_shell_call"): - """Local shell call. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the local shell call. Always ``local_shell_call``. Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL - :ivar id: The unique ID of the local shell call. Required. - :vartype id: str - :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. - :vartype call_id: str - :ivar action: Required. - :vartype action: ~azure.ai.projects.models.LocalShellExecAction - :ivar status: The status of the local shell call. Required. Is one of the following types: - Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - """ - - type: Literal[ItemResourceType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the local shell call. Always ``local_shell_call``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the local shell call. Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the local shell tool call generated by the model. Required.""" - action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the local shell call. Required. Is one of the following types: - Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - call_id: str, - action: "_models.LocalShellExecAction", - status: Literal["in_progress", "completed", "incomplete"], - created_by: Optional[Union["_models.CreatedBy", str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.LOCAL_SHELL_CALL # type: ignore - - -class ItemResourceLocalShellToolCallOutput(ItemResource, discriminator="local_shell_call_output"): - """Local shell call output. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the local shell tool call output. Always ``local_shell_call_output``. - Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT - :ivar id: The unique ID of the local shell tool call generated by the model. Required. - :vartype id: str - :ivar output: A JSON string of the output of the local shell tool call. Required. - :vartype output: str - :ivar status: Is one of the following types: Literal["in_progress"], Literal["completed"], - Literal["incomplete"] - :vartype status: str or str or str - """ - - type: Literal[ItemResourceType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the local shell tool call output. Always ``local_shell_call_output``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the local shell tool call generated by the model. Required.""" - output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the output of the local shell tool call. Required.""" - status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], - Literal[\"incomplete\"]""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - output: str, - created_by: Optional[Union["_models.CreatedBy", str]] = None, - status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.LOCAL_SHELL_CALL_OUTPUT # type: ignore - - -class ItemResourceMcpApprovalRequest(ItemResource, discriminator="mcp_approval_request"): - """MCP approval request. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the item. Always ``mcp_approval_request``. Required. - :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST - :ivar id: The unique ID of the approval request. Required. - :vartype id: str - :ivar server_label: The label of the MCP server making the request. Required. - :vartype server_label: str - :ivar name: The name of the tool to run. Required. - :vartype name: str - :ivar arguments: A JSON string of arguments for the tool. Required. - :vartype arguments: str - """ - - type: Literal[ItemResourceType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``mcp_approval_request``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the approval request. Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server making the request. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool to run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of arguments for the tool. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - server_label: str, - name: str, - arguments: str, - created_by: Optional[Union["_models.CreatedBy", str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.MCP_APPROVAL_REQUEST # type: ignore - - -class ItemResourceMcpApprovalResponseResource(ItemResource, discriminator="mcp_approval_response"): - """MCP approval response. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the item. Always ``mcp_approval_response``. Required. - :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE - :ivar id: The unique ID of the approval response. Required. - :vartype id: str - :ivar approval_request_id: The ID of the approval request being answered. Required. - :vartype approval_request_id: str - :ivar approve: Whether the request was approved. Required. - :vartype approve: bool - :ivar reason: - :vartype reason: str - """ - - type: Literal[ItemResourceType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``mcp_approval_response``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the approval response. Required.""" - approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the approval request being answered. Required.""" - approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the request was approved. Required.""" - reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - approval_request_id: str, - approve: bool, - created_by: Optional[Union["_models.CreatedBy", str]] = None, - reason: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.MCP_APPROVAL_RESPONSE # type: ignore - - -class ItemResourceMcpListTools(ItemResource, discriminator="mcp_list_tools"): - """MCP list tools. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the item. Always ``mcp_list_tools``. Required. - :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS - :ivar id: The unique ID of the list. Required. - :vartype id: str - :ivar server_label: The label of the MCP server. Required. - :vartype server_label: str - :ivar tools: The tools available on the server. Required. - :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] - :ivar error: - :vartype error: str - """ - - type: Literal[ItemResourceType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``mcp_list_tools``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the list. Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server. Required.""" - tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The tools available on the server. Required.""" - error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - server_label: str, - tools: list["_models.MCPListToolsTool"], - created_by: Optional[Union["_models.CreatedBy", str]] = None, - error: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.MCP_LIST_TOOLS # type: ignore - - -class ItemResourceMcpToolCall(ItemResource, discriminator="mcp_call"): - """MCP tool call. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: The type of the item. Always ``mcp_call``. Required. - :vartype type: str or ~azure.ai.projects.models.MCP_CALL - :ivar id: The unique ID of the tool call. Required. - :vartype id: str - :ivar server_label: The label of the MCP server running the tool. Required. - :vartype server_label: str - :ivar name: The name of the tool that was run. Required. - :vartype name: str - :ivar arguments: A JSON string of the arguments passed to the tool. Required. - :vartype arguments: str - :ivar output: - :vartype output: str - :ivar error: - :vartype error: str - :ivar status: The status of the tool call. One of ``in_progress``, ``completed``, - ``incomplete``, ``calling``, or ``failed``. Known values are: "in_progress", "completed", - "incomplete", "calling", and "failed". - :vartype status: str or ~azure.ai.projects.models.MCPToolCallStatus - :ivar approval_request_id: - :vartype approval_request_id: str - """ - - type: Literal[ItemResourceType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the item. Always ``mcp_call``. Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the tool call. Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server running the tool. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool that was run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the arguments passed to the tool. Required.""" - output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - status: Optional[Union[str, "_models.MCPToolCallStatus"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the tool call. One of ``in_progress``, ``completed``, ``incomplete``, - ``calling``, or ``failed``. Known values are: \"in_progress\", \"completed\", \"incomplete\", - \"calling\", and \"failed\".""" - approval_request_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - server_label: str, - name: str, - arguments: str, - created_by: Optional[Union["_models.CreatedBy", str]] = None, - output: Optional[str] = None, - error: Optional[str] = None, - status: Optional[Union[str, "_models.MCPToolCallStatus"]] = None, - approval_request_id: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.MCP_CALL # type: ignore - - -class ItemResourceOutputMessage(ItemResource, discriminator="output_message"): - """Output message. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar id: The unique ID of the output message. Required. - :vartype id: str - :ivar type: The type of the output message. Always ``message``. Required. - :vartype type: str or ~azure.ai.projects.models.OUTPUT_MESSAGE - :ivar role: The role of the output message. Always ``assistant``. Required. Default value is - "assistant". - :vartype role: str - :ivar content: The content of the output message. Required. - :vartype content: list[~azure.ai.projects.models.OutputMessageContent] - :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when input items are returned via API. Required. Is one of the - following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the output message. Required.""" - type: Literal[ItemResourceType.OUTPUT_MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output message. Always ``message``. Required.""" - role: Literal["assistant"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The role of the output message. Always ``assistant``. Required. Default value is \"assistant\".""" - content: list["_models.OutputMessageContent"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The content of the output message. Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the message input. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when input items are returned via API. Required. Is one of the - following types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + """Total number of unique clusters. Required.""" + method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Method used for clustering. Required.""" + usage: "_models.ClusterTokenUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Token usage while performing clustering analysis. Required.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - content: list["_models.OutputMessageContent"], - status: Literal["in_progress", "completed", "incomplete"], - created_by: Optional[Union["_models.CreatedBy", str]] = None, + sample_count: int, + unique_subcluster_count: int, + unique_cluster_count: int, + method: str, + usage: "_models.ClusterTokenUsage", ) -> None: ... @overload @@ -9441,57 +7774,27 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemResourceType.OUTPUT_MESSAGE # type: ignore - self.role: Literal["assistant"] = "assistant" -class ItemResourceWebSearchToolCall(ItemResource, discriminator="web_search_call"): - """Web search tool call. +class ItemReferenceParam(InputItem, discriminator="item_reference"): + """Item reference. - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar id: The unique ID of the web search tool call. Required. + :ivar type: The type of item to reference. Always ``item_reference``. Required. ITEM_REFERENCE. + :vartype type: str or ~azure.ai.projects.models.ITEM_REFERENCE + :ivar id: The ID of the item to reference. Required. :vartype id: str - :ivar type: The type of the web search tool call. Always ``web_search_call``. Required. - :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL - :ivar status: The status of the web search tool call. Required. Is one of the following types: - Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"] - :vartype status: str or str or str or str - :ivar action: An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required. Is one of - the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind - :vartype action: ~azure.ai.projects.models.WebSearchActionSearch or - ~azure.ai.projects.models.WebSearchActionOpenPage or - ~azure.ai.projects.models.WebSearchActionFind """ + type: Literal[InputItemType.ITEM_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of item to reference. Always ``item_reference``. Required. ITEM_REFERENCE.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the web search tool call. Required.""" - type: Literal[ItemResourceType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the web search tool call. Always ``web_search_call``. Required.""" - status: Literal["in_progress", "searching", "completed", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the web search tool call. Required. Is one of the following types: - Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]""" - action: Union["_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"] = ( - rest_field(visibility=["read", "create", "update", "delete", "query"]) - ) - """An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required. Is one of - the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind""" + """The ID of the item to reference. Required.""" @overload def __init__( self, *, id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "searching", "completed", "failed"], - action: Union[ - "_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind" - ], - created_by: Optional[Union["_models.CreatedBy", str]] = None, ) -> None: ... @overload @@ -9503,14 +7806,14 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemResourceType.WEB_SEARCH_CALL # type: ignore + self.type = InputItemType.ITEM_REFERENCE # type: ignore class KeyPressAction(ComputerAction, discriminator="keypress"): """KeyPress. :ivar type: Specifies the event type. For a keypress action, this property is always set to - ``keypress``. Required. + ``keypress``. Required. KEYPRESS. :vartype type: str or ~azure.ai.projects.models.KEYPRESS :ivar keys_property: The combination of keys the model is requesting to be pressed. This is an array of strings, each representing a key. Required. @@ -9519,7 +7822,7 @@ class KeyPressAction(ComputerAction, discriminator="keypress"): type: Literal[ComputerActionType.KEYPRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore """Specifies the event type. For a keypress action, this property is always set to ``keypress``. - Required.""" + Required. KEYPRESS.""" keys_property: list[str] = rest_field( name="keys", visibility=["read", "create", "update", "delete", "query"], original_tsp_name="keys" ) @@ -9599,12 +7902,12 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class LocalShellToolParam(Tool, discriminator="local_shell"): """Local shell tool. - :ivar type: The type of the local shell tool. Always ``local_shell``. Required. + :ivar type: The type of the local shell tool. Always ``local_shell``. Required. LOCAL_SHELL. :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL """ type: Literal[ToolType.LOCAL_SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the local shell tool. Always ``local_shell``. Required.""" + """The type of the local shell tool. Always ``local_shell``. Required. LOCAL_SHELL.""" @overload def __init__( @@ -9679,14 +7982,14 @@ class ManagedAzureAISearchIndex(Index, discriminator="ManagedAzureSearch"): :vartype description: str :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar type: Type of index. Required. Managed Azure Search + :ivar type: Type of index. Required. Managed Azure Search. :vartype type: str or ~azure.ai.projects.models.MANAGED_AZURE_SEARCH :ivar vector_store_id: Vector store id of managed index. Required. :vartype vector_store_id: str """ type: Literal[IndexType.MANAGED_AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Type of index. Required. Managed Azure Search""" + """Type of index. Required. Managed Azure Search.""" vector_store_id: str = rest_field(name="vectorStoreId", visibility=["create"]) """Vector store id of managed index. Required.""" @@ -9767,7 +8070,7 @@ class MCPListToolsToolInputSchema(_Model): class MCPTool(Tool, discriminator="mcp"): """MCP tool. - :ivar type: The type of the MCP tool. Always ``mcp``. Required. + :ivar type: The type of the MCP tool. Always ``mcp``. Required. MCP. :vartype type: str or ~azure.ai.projects.models.MCP :ivar server_label: A label for this MCP server, used to identify it in tool calls. Required. :vartype server_label: str @@ -9775,10 +8078,9 @@ class MCPTool(Tool, discriminator="mcp"): provided. :vartype server_url: str :ivar connector_id: Identifier for service connectors, like those available in ChatGPT. One of - ``server_url`` or ``connector_id`` must be provided. Learn more about service - connectors `here - `_. - Currently supported ``connector_id`` values are: + ``server_url`` or ``connector_id`` must be provided. Learn more about service connectors `here + `_. Currently supported + ``connector_id`` values are: * Dropbox: `connector_dropbox` * Gmail: `connector_gmail` @@ -9794,8 +8096,8 @@ class MCPTool(Tool, discriminator="mcp"): Literal["connector_sharepoint"] :vartype connector_id: str or str or str or str or str or str or str or str :ivar authorization: An OAuth access token that can be used with a remote MCP server, either - with a custom MCP server URL or a service connector. Your application - must handle the OAuth authorization flow and provide the token here. + with a custom MCP server URL or a service connector. Your application must handle the OAuth + authorization flow and provide the token here. :vartype authorization: str :ivar server_description: Optional description of the MCP server, used to provide more context. :vartype server_description: str @@ -9813,12 +8115,11 @@ class MCPTool(Tool, discriminator="mcp"): """ type: Literal[ToolType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the MCP tool. Always ``mcp``. Required.""" + """The type of the MCP tool. Always ``mcp``. Required. MCP.""" server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """A label for this MCP server, used to identify it in tool calls. Required.""" server_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The URL for the MCP server. One of ``server_url`` or ``connector_id`` must be - provided.""" + """The URL for the MCP server. One of ``server_url`` or ``connector_id`` must be provided.""" connector_id: Optional[ Literal[ "connector_dropbox", @@ -9831,11 +8132,10 @@ class MCPTool(Tool, discriminator="mcp"): "connector_sharepoint", ] ] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Identifier for service connectors, like those available in ChatGPT. One of - ``server_url`` or ``connector_id`` must be provided. Learn more about service - connectors `here - `_. - Currently supported ``connector_id`` values are: + """Identifier for service connectors, like those available in ChatGPT. One of ``server_url`` or + ``connector_id`` must be provided. Learn more about service connectors `here + `_. Currently supported + ``connector_id`` values are: * Dropbox: `connector_dropbox` * Gmail: `connector_gmail` @@ -9850,9 +8150,9 @@ class MCPTool(Tool, discriminator="mcp"): Literal[\"connector_microsoftteams\"], Literal[\"connector_outlookcalendar\"], Literal[\"connector_outlookemail\"], Literal[\"connector_sharepoint\"]""" authorization: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An OAuth access token that can be used with a remote MCP server, either - with a custom MCP server URL or a service connector. Your application - must handle the OAuth authorization flow and provide the token here.""" + """An OAuth access token that can be used with a remote MCP server, either with a custom MCP + server URL or a service connector. Your application must handle the OAuth authorization flow + and provide the token here.""" server_description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Optional description of the MCP server, used to provide more context.""" headers: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -9911,20 +8211,20 @@ class MCPToolFilter(_Model): :ivar tool_names: MCP allowed tools. :vartype tool_names: list[str] - :ivar read_only: Indicates whether or not a tool modifies data or is read-only. If an - MCP server is `annotated with `readOnlyHint` + :ivar read_only: Indicates whether or not a tool modifies data or is read-only. If an MCP + server is `annotated with `readOnlyHint` `_, - it will match this filter. + it will match this filter. :vartype read_only: bool """ tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """MCP allowed tools.""" read_only: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Indicates whether or not a tool modifies data or is read-only. If an - MCP server is `annotated with `readOnlyHint` + """Indicates whether or not a tool modifies data or is read-only. If an MCP server is `annotated + with `readOnlyHint` `_, - it will match this filter.""" + it will match this filter.""" @overload def __init__( @@ -10069,16 +8369,17 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class MemorySearchPreviewTool(Tool, discriminator="memory_search"): +class MemorySearchPreviewTool(Tool, discriminator="memory_search_preview"): """A tool for integrating memories into the agent. - :ivar type: The type of the tool. Always ``memory_search``. Required. - :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH + :ivar type: The type of the tool. Always ``memory_search_preview``. Required. + MEMORY_SEARCH_PREVIEW. + :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_PREVIEW :ivar memory_store_name: The name of the memory store to use. Required. :vartype memory_store_name: str - :ivar scope: The namespace used to group and isolate memories, such as a user ID. - Limits which memories can be retrieved or updated. - Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required. + :ivar scope: The namespace used to group and isolate memories, such as a user ID. Limits which + memories can be retrieved or updated. Use special variable ``{{$userId}}`` to scope memories to + the current signed-in user. Required. :vartype scope: str :ivar search_options: Options for searching the memory store. :vartype search_options: ~azure.ai.projects.models.MemorySearchOptions @@ -10087,14 +8388,14 @@ class MemorySearchPreviewTool(Tool, discriminator="memory_search"): :vartype update_delay: int """ - type: Literal[ToolType.MEMORY_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the tool. Always ``memory_search``. Required.""" + type: Literal[ToolType.MEMORY_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the tool. Always ``memory_search_preview``. Required. MEMORY_SEARCH_PREVIEW.""" memory_store_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the memory store to use. Required.""" scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The namespace used to group and isolate memories, such as a user ID. - Limits which memories can be retrieved or updated. - Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required.""" + """The namespace used to group and isolate memories, such as a user ID. Limits which memories can + be retrieved or updated. Use special variable ``{{$userId}}`` to scope memories to the current + signed-in user. Required.""" search_options: Optional["_models.MemorySearchOptions"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -10121,59 +8422,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolType.MEMORY_SEARCH # type: ignore - - -class MemorySearchToolCallItemResource(ItemResource, discriminator="memory_search_call"): - """MemorySearchToolCallItemResource. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_CALL - :ivar status: The status of the memory search tool call. One of ``in_progress``, - ``searching``, ``completed``, ``incomplete`` or ``failed``,. Required. Is one of the following - types: Literal["in_progress"], Literal["searching"], Literal["completed"], - Literal["incomplete"], Literal["failed"] - :vartype status: str or str or str or str or str - :ivar results: The results returned from the memory search. - :vartype results: list[~azure.ai.projects.models.MemorySearchItem] - """ - - type: Literal[ItemResourceType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the memory search tool call. One of ``in_progress``, - ``searching``, ``completed``, ``incomplete`` or ``failed``,. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], - Literal[\"incomplete\"], Literal[\"failed\"]""" - results: Optional[list["_models.MemorySearchItem"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The results returned from the memory search.""" - - @overload - def __init__( - self, - *, - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], - created_by: Optional[Union["_models.CreatedBy", str]] = None, - results: Optional[list["_models.MemorySearchItem"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.MEMORY_SEARCH_CALL # type: ignore + self.type = ToolType.MEMORY_SEARCH_PREVIEW # type: ignore class MemoryStoreDefinition(_Model): @@ -10298,9 +8547,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class MemoryStoreDeleteScopeResult(_Model): """Response for deleting memories from a scope. - :ivar object: The object type. Always 'memory_store.scope.deleted'. Required. Default value is - "memory_store.scope.deleted". - :vartype object: str + :ivar object: The object type. Always 'memory_store.scope.deleted'. Required. + MEMORY_STORE_SCOPE_DELETED. + :vartype object: str or ~azure.ai.projects.models.MEMORY_STORE_SCOPE_DELETED :ivar name: The name of the memory store. Required. :vartype name: str :ivar scope: The scope from which memories were deleted. Required. @@ -10309,11 +8558,10 @@ class MemoryStoreDeleteScopeResult(_Model): :vartype deleted: bool """ - object: Literal["memory_store.scope.deleted"] = rest_field( + object: Literal[MemoryStoreObjectType.MEMORY_STORE_SCOPE_DELETED] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The object type. Always 'memory_store.scope.deleted'. Required. Default value is - \"memory_store.scope.deleted\".""" + """The object type. Always 'memory_store.scope.deleted'. Required. MEMORY_STORE_SCOPE_DELETED.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the memory store. Required.""" scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -10325,6 +8573,7 @@ class MemoryStoreDeleteScopeResult(_Model): def __init__( self, *, + object: Literal[MemoryStoreObjectType.MEMORY_STORE_SCOPE_DELETED], name: str, scope: str, deleted: bool, @@ -10339,15 +8588,13 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.object: Literal["memory_store.scope.deleted"] = "memory_store.scope.deleted" class MemoryStoreDetails(_Model): """A memory store that can store and retrieve user memories. - :ivar object: The object type, which is always 'memory_store'. Required. Default value is - "memory_store". - :vartype object: str + :ivar object: The object type, which is always 'memory_store'. Required. MEMORY_STORE. + :vartype object: str or ~azure.ai.projects.models.MEMORY_STORE :ivar id: The unique identifier of the memory store. Required. :vartype id: str :ivar created_at: The Unix timestamp (seconds) when the memory store was created. Required. @@ -10365,8 +8612,10 @@ class MemoryStoreDetails(_Model): :vartype definition: ~azure.ai.projects.models.MemoryStoreDefinition """ - object: Literal["memory_store"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type, which is always 'memory_store'. Required. Default value is \"memory_store\".""" + object: Literal[MemoryStoreObjectType.MEMORY_STORE] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The object type, which is always 'memory_store'. Required. MEMORY_STORE.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The unique identifier of the memory store. Required.""" created_at: datetime.datetime = rest_field( @@ -10390,6 +8639,7 @@ class MemoryStoreDetails(_Model): def __init__( self, *, + object: Literal[MemoryStoreObjectType.MEMORY_STORE], id: str, # pylint: disable=redefined-builtin created_at: datetime.datetime, updated_at: datetime.datetime, @@ -10408,7 +8658,6 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.object: Literal["memory_store"] = "memory_store" class MemoryStoreOperationUsage(_Model): @@ -10606,13 +8855,15 @@ class MicrosoftFabricPreviewTool(Tool, discriminator="fabric_dataagent_preview") """The input definition information for a Microsoft Fabric tool as used to configure an agent. :ivar type: The object type, which is always 'fabric_dataagent_preview'. Required. + FABRIC_DATAAGENT_PREVIEW. :vartype type: str or ~azure.ai.projects.models.FABRIC_DATAAGENT_PREVIEW :ivar fabric_dataagent_preview: The fabric data agent tool parameters. Required. :vartype fabric_dataagent_preview: ~azure.ai.projects.models.FabricDataAgentToolParameters """ type: Literal[ToolType.FABRIC_DATAAGENT_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'fabric_dataagent_preview'. Required.""" + """The object type, which is always 'fabric_dataagent_preview'. Required. + FABRIC_DATAAGENT_PREVIEW.""" fabric_dataagent_preview: "_models.FabricDataAgentToolParameters" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -10642,7 +8893,7 @@ class ModelDeployment(Deployment, discriminator="ModelDeployment"): :ivar name: Name of the deployment. Required. :vartype name: str - :ivar type: The type of the deployment. Required. Model deployment + :ivar type: The type of the deployment. Required. Model deployment. :vartype type: str or ~azure.ai.projects.models.MODEL_DEPLOYMENT :ivar model_name: Publisher-specific name of the deployed model. Required. :vartype model_name: str @@ -10659,7 +8910,7 @@ class ModelDeployment(Deployment, discriminator="ModelDeployment"): """ type: Literal[DeploymentType.MODEL_DEPLOYMENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the deployment. Required. Model deployment""" + """The type of the deployment. Required. Model deployment.""" model_name: str = rest_field(name="modelName", visibility=["read"]) """Publisher-specific name of the deployed model. Required.""" model_version: str = rest_field(name="modelVersion", visibility=["read"]) @@ -10738,6 +8989,50 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class ModelSamplingParams(_Model): + """Represents a set of parameters used to control the sampling behavior of a language model during + text generation. + + :ivar temperature: The temperature parameter for sampling. Required. + :vartype temperature: float + :ivar top_p: The top-p parameter for nucleus sampling. Required. + :vartype top_p: float + :ivar seed: The random seed for reproducibility. Required. + :vartype seed: int + :ivar max_completion_tokens: The maximum number of tokens allowed in the completion. Required. + :vartype max_completion_tokens: int + """ + + temperature: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The temperature parameter for sampling. Required.""" + top_p: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The top-p parameter for nucleus sampling. Required.""" + seed: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The random seed for reproducibility. Required.""" + max_completion_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The maximum number of tokens allowed in the completion. Required.""" + + @overload + def __init__( + self, + *, + temperature: float, + top_p: float, + seed: int, + max_completion_tokens: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class MonthlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Monthly"): """Monthly recurrence schedule. @@ -10776,8 +9071,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class Move(ComputerAction, discriminator="move"): """Move. - :ivar type: Specifies the event type. For a move action, this property is - always set to ``move``. Required. + :ivar type: Specifies the event type. For a move action, this property is always set to + ``move``. Required. MOVE. :vartype type: str or ~azure.ai.projects.models.MOVE :ivar x: The x-coordinate to move to. Required. :vartype x: int @@ -10786,8 +9081,8 @@ class Move(ComputerAction, discriminator="move"): """ type: Literal[ComputerActionType.MOVE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a move action, this property is - always set to ``move``. Required.""" + """Specifies the event type. For a move action, this property is always set to ``move``. Required. + MOVE.""" x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The x-coordinate to move to. Required.""" y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -10816,12 +9111,12 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class NoAuthenticationCredentials(BaseCredentials, discriminator="None"): """Credentials that do not require authentication. - :ivar type: The credential type. Required. No credential + :ivar type: The credential type. Required. No credential. :vartype type: str or ~azure.ai.projects.models.NONE """ type: Literal[CredentialType.NONE] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. No credential""" + """The credential type. Required. No credential.""" @overload def __init__( @@ -10840,53 +9135,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = CredentialType.NONE # type: ignore -class OAuthConsentRequestItemResource(ItemResource, discriminator="oauth_consent_request"): - """Request from the service for the user to perform OAuth consent. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar id: Required. - :vartype id: str - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.OAUTH_CONSENT_REQUEST - :ivar consent_link: The link the user can use to perform OAuth consent. Required. - :vartype consent_link: str - :ivar server_label: The server label for the OAuth consent request. Required. - :vartype server_label: str - """ - - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - type: Literal[ItemResourceType.OAUTH_CONSENT_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - consent_link: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The link the user can use to perform OAuth consent. Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The server label for the OAuth consent request. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - consent_link: str, - server_label: str, - created_by: Optional[Union["_models.CreatedBy", str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.OAUTH_CONSENT_REQUEST # type: ignore - - class OneTimeTrigger(Trigger, discriminator="OneTime"): """One-time trigger. @@ -10962,12 +9210,12 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class OpenApiAnonymousAuthDetails(OpenApiAuthDetails, discriminator="anonymous"): """Security details for OpenApi anonymous authentication. - :ivar type: The object type, which is always 'anonymous'. Required. + :ivar type: The object type, which is always 'anonymous'. Required. ANONYMOUS. :vartype type: str or ~azure.ai.projects.models.ANONYMOUS """ type: Literal[OpenApiAuthType.ANONYMOUS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'anonymous'. Required.""" + """The object type, which is always 'anonymous'. Required. ANONYMOUS.""" @overload def __init__( @@ -10995,7 +9243,7 @@ class OpenApiFunctionDefinition(_Model): and how to call the function. :vartype description: str :ivar spec: The openapi function shape, described as a JSON Schema object. Required. - :vartype spec: any + :vartype spec: dict[str, any] :ivar auth: Open API authentication details. Required. :vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. @@ -11009,7 +9257,7 @@ class OpenApiFunctionDefinition(_Model): description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """A description of what the function does, used by the model to choose when and how to call the function.""" - spec: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + spec: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The openapi function shape, described as a JSON Schema object. Required.""" auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Open API authentication details. Required.""" @@ -11023,7 +9271,7 @@ def __init__( self, *, name: str, - spec: Any, + spec: dict[str, Any], auth: "_models.OpenApiAuthDetails", description: Optional[str] = None, default_params: Optional[list[str]] = None, @@ -11050,7 +9298,7 @@ class OpenApiFunctionDefinitionFunction(_Model): :vartype description: str :ivar parameters: The parameters the functions accepts, described as a JSON Schema object. Required. - :vartype parameters: any + :vartype parameters: dict[str, any] """ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -11058,7 +9306,7 @@ class OpenApiFunctionDefinitionFunction(_Model): description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """A description of what the function does, used by the model to choose when and how to call the function.""" - parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + parameters: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The parameters the functions accepts, described as a JSON Schema object. Required.""" @overload @@ -11066,7 +9314,7 @@ def __init__( self, *, name: str, - parameters: Any, + parameters: dict[str, Any], description: Optional[str] = None, ) -> None: ... @@ -11084,14 +9332,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class OpenApiManagedAuthDetails(OpenApiAuthDetails, discriminator="managed_identity"): """Security details for OpenApi managed_identity authentication. - :ivar type: The object type, which is always 'managed_identity'. Required. + :ivar type: The object type, which is always 'managed_identity'. Required. MANAGED_IDENTITY. :vartype type: str or ~azure.ai.projects.models.MANAGED_IDENTITY :ivar security_scheme: Connection auth security details. Required. :vartype security_scheme: ~azure.ai.projects.models.OpenApiManagedSecurityScheme """ type: Literal[OpenApiAuthType.MANAGED_IDENTITY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'managed_identity'. Required.""" + """The object type, which is always 'managed_identity'. Required. MANAGED_IDENTITY.""" security_scheme: "_models.OpenApiManagedSecurityScheme" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -11148,13 +9396,14 @@ class OpenApiProjectConnectionAuthDetails(OpenApiAuthDetails, discriminator="pro """Security details for OpenApi project connection authentication. :ivar type: The object type, which is always 'project_connection'. Required. + PROJECT_CONNECTION. :vartype type: str or ~azure.ai.projects.models.PROJECT_CONNECTION :ivar security_scheme: Project connection auth security details. Required. :vartype security_scheme: ~azure.ai.projects.models.OpenApiProjectConnectionSecurityScheme """ type: Literal[OpenApiAuthType.PROJECT_CONNECTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'project_connection'. Required.""" + """The object type, which is always 'project_connection'. Required. PROJECT_CONNECTION.""" security_scheme: "_models.OpenApiProjectConnectionSecurityScheme" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -11210,14 +9459,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class OpenApiTool(Tool, discriminator="openapi"): """The input definition information for an OpenAPI tool as used to configure an agent. - :ivar type: The object type, which is always 'openapi'. Required. + :ivar type: The object type, which is always 'openapi'. Required. OPENAPI. :vartype type: str or ~azure.ai.projects.models.OPENAPI :ivar openapi: The openapi function definition. Required. :vartype openapi: ~azure.ai.projects.models.OpenApiFunctionDefinition """ type: Literal[ToolType.OPENAPI] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'openapi'. Required.""" + """The object type, which is always 'openapi'. Required. OPENAPI.""" openapi: "_models.OpenApiFunctionDefinition" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -11242,38 +9491,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = ToolType.OPENAPI # type: ignore -class OutputContent(_Model): - """OutputContent. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - OutputContentOutputTextContent, ReasoningTextContent, OutputContentRefusalContent - - :ivar type: Required. Known values are: "output_text", "refusal", and "reasoning_text". - :vartype type: str or ~azure.ai.projects.models.OutputContentType - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"output_text\", \"refusal\", and \"reasoning_text\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - class OutputMessageContent(_Model): """OutputMessageContent. @@ -11309,7 +9526,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class OutputMessageContentOutputTextContent(OutputMessageContent, discriminator="output_text"): """Output text. - :ivar type: The type of the output text. Always ``output_text``. Required. + :ivar type: The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT. :vartype type: str or ~azure.ai.projects.models.OUTPUT_TEXT :ivar text: The text output from the model. Required. :vartype text: str @@ -11320,7 +9537,7 @@ class OutputMessageContentOutputTextContent(OutputMessageContent, discriminator= """ type: Literal[OutputMessageContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output text. Always ``output_text``. Required.""" + """The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT.""" text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The text output from the model. Required.""" annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -11351,14 +9568,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class OutputMessageContentRefusalContent(OutputMessageContent, discriminator="refusal"): """Refusal. - :ivar type: The type of the refusal. Always ``refusal``. Required. + :ivar type: The type of the refusal. Always ``refusal``. Required. REFUSAL. :vartype type: str or ~azure.ai.projects.models.REFUSAL :ivar refusal: The refusal explanation from the model. Required. :vartype refusal: str """ type: Literal[OutputMessageContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the refusal. Always ``refusal``. Required.""" + """The type of the refusal. Always ``refusal``. Required. REFUSAL.""" refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The refusal explanation from the model. Required.""" @@ -11483,7 +9700,7 @@ class PromptAgentDefinition(AgentDefinition, discriminator="prompt"): :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. + :ivar kind: Required. PROMPT. :vartype kind: str or ~azure.ai.projects.models.PROMPT :ivar model: The model deployment to use for this agent. Required. :vartype model: str @@ -11491,8 +9708,7 @@ class PromptAgentDefinition(AgentDefinition, discriminator="prompt"): :vartype instructions: str :ivar temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and - deterministic. - We generally recommend altering this or ``top_p`` but not both. + deterministic. We generally recommend altering this or ``top_p`` but not both. :vartype temperature: float :ivar top_p: An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability @@ -11503,27 +9719,31 @@ class PromptAgentDefinition(AgentDefinition, discriminator="prompt"): :vartype top_p: float :ivar reasoning: :vartype reasoning: ~azure.ai.projects.models.Reasoning - :ivar tools: An array of tools the model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter. + :ivar tools: An array of tools the model may call while generating a response. You can specify + which tool to use by setting the ``tool_choice`` parameter. :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar tool_choice: How the model should select which tool (or tools) to use when generating a + response. See the ``tools`` parameter to see how to specify which tools the model can call. Is + either a str type or a ToolChoiceParam type. + :vartype tool_choice: str or ~azure.ai.projects.models.ToolChoiceParam :ivar text: Configuration options for a text response from the model. Can be plain text or structured JSON data. - :vartype text: ~azure.ai.projects.models.PromptAgentDefinitionText + :vartype text: ~azure.ai.projects.models.PromptAgentDefinitionTextOptions :ivar structured_inputs: Set of structured inputs that can participate in prompt template substitution or tool argument bindings. :vartype structured_inputs: dict[str, ~azure.ai.projects.models.StructuredInputDefinition] """ kind: Literal[AgentKind.PROMPT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" + """Required. PROMPT.""" model: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The model deployment to use for this agent. Required.""" instructions: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """A system (or developer) message inserted into the model's context.""" temperature: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output - more random, while lower values like 0.2 will make it more focused and deterministic. - We generally recommend altering this or ``top_p`` but not both.""" + more random, while lower values like 0.2 will make it more focused and deterministic. We + generally recommend altering this or ``top_p`` but not both.""" top_p: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability @@ -11533,9 +9753,15 @@ class PromptAgentDefinition(AgentDefinition, discriminator="prompt"): We generally recommend altering this or ``temperature`` but not both.""" reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of tools the model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter.""" - text: Optional["_models.PromptAgentDefinitionText"] = rest_field( + """An array of tools the model may call while generating a response. You can specify which tool to + use by setting the ``tool_choice`` parameter.""" + tool_choice: Optional[Union[str, "_models.ToolChoiceParam"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """How the model should select which tool (or tools) to use when generating a response. See the + ``tools`` parameter to see how to specify which tools the model can call. Is either a str type + or a ToolChoiceParam type.""" + text: Optional["_models.PromptAgentDefinitionTextOptions"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) """Configuration options for a text response from the model. Can be plain text or structured JSON @@ -11557,7 +9783,8 @@ def __init__( top_p: Optional[float] = None, reasoning: Optional["_models.Reasoning"] = None, tools: Optional[list["_models.Tool"]] = None, - text: Optional["_models.PromptAgentDefinitionText"] = None, + tool_choice: Optional[Union[str, "_models.ToolChoiceParam"]] = None, + text: Optional["_models.PromptAgentDefinitionTextOptions"] = None, structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = None, ) -> None: ... @@ -11573,8 +9800,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.kind = AgentKind.PROMPT # type: ignore -class PromptAgentDefinitionText(_Model): - """PromptAgentDefinitionText. +class PromptAgentDefinitionTextOptions(_Model): + """Configuration options for a text response from the model. Can be plain text or structured JSON + data. :ivar format: :vartype format: ~azure.ai.projects.models.TextResponseFormatConfiguration @@ -11607,20 +9835,20 @@ class PromptBasedEvaluatorDefinition(EvaluatorDefinition, discriminator="prompt" :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. This includes parameters like type, properties, required. - :vartype init_parameters: any + :vartype init_parameters: dict[str, any] :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This includes parameters like type, properties, required. - :vartype data_schema: any + :vartype data_schema: dict[str, any] :ivar metrics: List of output metrics produced by this evaluator. :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] - :ivar type: Required. Prompt-based definition + :ivar type: Required. Prompt-based definition. :vartype type: str or ~azure.ai.projects.models.PROMPT :ivar prompt_text: The prompt text used for evaluation. Required. :vartype prompt_text: str """ type: Literal[EvaluatorDefinitionType.PROMPT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Prompt-based definition""" + """Required. Prompt-based definition.""" prompt_text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The prompt text used for evaluation. Required.""" @@ -11629,8 +9857,8 @@ def __init__( self, *, prompt_text: str, - init_parameters: Optional[Any] = None, - data_schema: Optional[Any] = None, + init_parameters: Optional[dict[str, Any]] = None, + data_schema: Optional[dict[str, Any]] = None, metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, ) -> None: ... @@ -11806,17 +10034,19 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ReasoningTextContent(OutputContent, discriminator="reasoning_text"): +class ReasoningTextContent(_Model): """ReasoningTextContent. - :ivar type: The type of the reasoning text. Always ``reasoning_text``. Required. - :vartype type: str or ~azure.ai.projects.models.REASONING_TEXT + :ivar type: The type of the reasoning text. Always ``reasoning_text``. Required. Default value + is "reasoning_text". + :vartype type: str :ivar text: The reasoning text from the model. Required. :vartype text: str """ - type: Literal[OutputContentType.REASONING_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the reasoning text. Always ``reasoning_text``. Required.""" + type: Literal["reasoning_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the reasoning text. Always ``reasoning_text``. Required. Default value is + \"reasoning_text\".""" text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The reasoning text from the model. Required.""" @@ -11836,7 +10066,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = OutputContentType.REASONING_TEXT # type: ignore + self.type: Literal["reasoning_text"] = "reasoning_text" class RecurrenceTrigger(Trigger, discriminator="Recurrence"): @@ -12042,14 +10272,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class SASCredentials(BaseCredentials, discriminator="SAS"): """Shared Access Signature (SAS) credential definition. - :ivar type: The credential type. Required. Shared Access Signature (SAS) credential + :ivar type: The credential type. Required. Shared Access Signature (SAS) credential. :vartype type: str or ~azure.ai.projects.models.SAS :ivar sas_token: SAS token. :vartype sas_token: str """ type: Literal[CredentialType.SAS] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. Shared Access Signature (SAS) credential""" + """The credential type. Required. Shared Access Signature (SAS) credential.""" sas_token: Optional[str] = rest_field(name="SAS", visibility=["read"]) """SAS token.""" @@ -12202,14 +10432,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class Screenshot(ComputerAction, discriminator="screenshot"): """Screenshot. - :ivar type: Specifies the event type. For a screenshot action, this property is - always set to ``screenshot``. Required. + :ivar type: Specifies the event type. For a screenshot action, this property is always set to + ``screenshot``. Required. SCREENSHOT. :vartype type: str or ~azure.ai.projects.models.SCREENSHOT """ type: Literal[ComputerActionType.SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a screenshot action, this property is - always set to ``screenshot``. Required.""" + """Specifies the event type. For a screenshot action, this property is always set to + ``screenshot``. Required. SCREENSHOT.""" @overload def __init__( @@ -12231,8 +10461,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class Scroll(ComputerAction, discriminator="scroll"): """Scroll. - :ivar type: Specifies the event type. For a scroll action, this property is - always set to ``scroll``. Required. + :ivar type: Specifies the event type. For a scroll action, this property is always set to + ``scroll``. Required. SCROLL. :vartype type: str or ~azure.ai.projects.models.SCROLL :ivar x: The x-coordinate where the scroll occurred. Required. :vartype x: int @@ -12245,8 +10475,8 @@ class Scroll(ComputerAction, discriminator="scroll"): """ type: Literal[ComputerActionType.SCROLL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a scroll action, this property is - always set to ``scroll``. Required.""" + """Specifies the event type. For a scroll action, this property is always set to ``scroll``. + Required. SCROLL.""" x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The x-coordinate where the scroll occurred. Required.""" y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -12282,16 +10512,15 @@ class SharepointGroundingToolParameters(_Model): """The sharepoint grounding tool parameters. :ivar project_connections: The project connections attached to this tool. There can be a - maximum of 1 connection - resource attached to the tool. + maximum of 1 connection resource attached to the tool. :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] """ project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The project connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool.""" + """The project connections attached to this tool. There can be a maximum of 1 connection resource + attached to the tool.""" @overload def __init__( @@ -12315,6 +10544,7 @@ class SharepointPreviewTool(Tool, discriminator="sharepoint_grounding_preview"): """The input definition information for a sharepoint tool as used to configure an agent. :ivar type: The object type, which is always 'sharepoint_grounding_preview'. Required. + SHAREPOINT_GROUNDING_PREVIEW. :vartype type: str or ~azure.ai.projects.models.SHAREPOINT_GROUNDING_PREVIEW :ivar sharepoint_grounding_preview: The sharepoint grounding tool parameters. Required. :vartype sharepoint_grounding_preview: @@ -12322,7 +10552,8 @@ class SharepointPreviewTool(Tool, discriminator="sharepoint_grounding_preview"): """ type: Literal[ToolType.SHAREPOINT_GROUNDING_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'sharepoint_grounding_preview'. Required.""" + """The object type, which is always 'sharepoint_grounding_preview'. Required. + SHAREPOINT_GROUNDING_PREVIEW.""" sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -12347,6 +10578,100 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = ToolType.SHAREPOINT_GROUNDING_PREVIEW # type: ignore +class ToolChoiceParam(_Model): + """How the model should select which tool (or tools) to use when generating a response. See the + ``tools`` parameter to see how to specify which tools the model can call. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ToolChoiceAllowed, SpecificApplyPatchParam, ToolChoiceCodeInterpreter, + ToolChoiceComputerUsePreview, ToolChoiceCustom, ToolChoiceFileSearch, ToolChoiceFunction, + ToolChoiceImageGeneration, ToolChoiceMCP, SpecificFunctionShellParam, + ToolChoiceWebSearchPreview, ToolChoiceWebSearchPreview20250311 + + :ivar type: Required. Known values are: "allowed_tools", "function", "mcp", "custom", + "apply_patch", "shell", "file_search", "web_search_preview", "computer_use_preview", + "web_search_preview_2025_03_11", "image_generation", and "code_interpreter". + :vartype type: str or ~azure.ai.projects.models.ToolChoiceParamType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"allowed_tools\", \"function\", \"mcp\", \"custom\", + \"apply_patch\", \"shell\", \"file_search\", \"web_search_preview\", \"computer_use_preview\", + \"web_search_preview_2025_03_11\", \"image_generation\", and \"code_interpreter\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SpecificApplyPatchParam(ToolChoiceParam, discriminator="apply_patch"): + """Specific apply patch tool choice. + + :ivar type: The tool to call. Always ``apply_patch``. Required. APPLY_PATCH. + :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH + """ + + type: Literal[ToolChoiceParamType.APPLY_PATCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The tool to call. Always ``apply_patch``. Required. APPLY_PATCH.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.APPLY_PATCH # type: ignore + + +class SpecificFunctionShellParam(ToolChoiceParam, discriminator="shell"): + """Specific shell tool choice. + + :ivar type: The tool to call. Always ``shell``. Required. SHELL. + :vartype type: str or ~azure.ai.projects.models.SHELL + """ + + type: Literal[ToolChoiceParamType.SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The tool to call. Always ``shell``. Required. SHELL.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.SHELL # type: ignore + + class StructuredInputDefinition(_Model): """An structured input that can participate in prompt template substitutions and tool argument binding. @@ -12356,7 +10681,7 @@ class StructuredInputDefinition(_Model): :ivar default_value: The default value for the input if no run-time value is provided. :vartype default_value: any :ivar schema: The JSON schema for the structured input (optional). - :vartype schema: any + :vartype schema: dict[str, any] :ivar required: Whether the input property is required when the agent is invoked. :vartype required: bool """ @@ -12365,7 +10690,7 @@ class StructuredInputDefinition(_Model): """A human-readable description of the input.""" default_value: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The default value for the input if no run-time value is provided.""" - schema: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + schema: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The JSON schema for the structured input (optional).""" required: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Whether the input property is required when the agent is invoked.""" @@ -12376,7 +10701,7 @@ def __init__( *, description: Optional[str] = None, default_value: Optional[Any] = None, - schema: Optional[Any] = None, + schema: Optional[dict[str, Any]] = None, required: Optional[bool] = None, ) -> None: ... @@ -12400,7 +10725,7 @@ class StructuredOutputDefinition(_Model): emit the output. Required. :vartype description: str :ivar schema: The JSON schema for the structured output. Required. - :vartype schema: any + :vartype schema: dict[str, any] :ivar strict: Whether to enforce strict validation. Default ``true``. Required. :vartype strict: bool """ @@ -12410,7 +10735,7 @@ class StructuredOutputDefinition(_Model): description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """A description of the output to emit. Used by the model to determine when to emit the output. Required.""" - schema: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + schema: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The JSON schema for the structured output. Required.""" strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Whether to enforce strict validation. Default ``true``. Required.""" @@ -12421,7 +10746,7 @@ def __init__( *, name: str, description: str, - schema: Any, + schema: dict[str, Any], strict: bool, ) -> None: ... @@ -12436,43 +10761,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class StructuredOutputsItemResource(ItemResource, discriminator="structured_outputs"): - """StructuredOutputsItemResource. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.STRUCTURED_OUTPUTS - :ivar output: The structured output captured during the response. Required. - :vartype output: any - """ - - type: Literal[ItemResourceType.STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - output: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The structured output captured during the response. Required.""" - - @overload - def __init__( - self, - *, - output: Any, - created_by: Optional[Union["_models.CreatedBy", str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.STRUCTURED_OUTPUTS # type: ignore - - class Summary(_Model): """Summary text. @@ -12619,15 +10907,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class TextResponseFormatConfiguration(_Model): - """An object specifying the format that the model must output. - Configuring ``{ "type": "json_schema" }`` enables Structured Outputs, - which ensures the model will match your supplied JSON schema. Learn more in the - `Structured Outputs guide `_. - The default format is ``{ "type": "text" }`` with no additional options. - *Not recommended for gpt-4o and newer models:** - Setting to ``{ "type": "json_object" }`` enables the older JSON mode, which - ensures the message the model generates is valid JSON. Using ``json_schema`` - is preferred for models that support it. + """An object specifying the format that the model must output. Configuring ``{ "type": + "json_schema" }`` enables Structured Outputs, which ensures the model will match your supplied + JSON schema. Learn more in the `Structured Outputs guide + `_. The default format is ``{ + "type": "text" }`` with no additional options. *Not recommended for gpt-4o and newer models:** + Setting to ``{ "type": "json_object" }`` enables the older JSON mode, which ensures the message + the model generates is valid JSON. Using ``json_schema`` is preferred for models that support + it. You probably want to use the sub-classes and not this class directly. Known sub-classes are: TextResponseFormatConfigurationResponseFormatJsonObject, TextResponseFormatJsonSchema, @@ -12665,11 +10952,12 @@ class TextResponseFormatConfigurationResponseFormatJsonObject( """JSON object. :ivar type: The type of response format being defined. Always ``json_object``. Required. + JSON_OBJECT. :vartype type: str or ~azure.ai.projects.models.JSON_OBJECT """ type: Literal[TextResponseFormatConfigurationType.JSON_OBJECT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of response format being defined. Always ``json_object``. Required.""" + """The type of response format being defined. Always ``json_object``. Required. JSON_OBJECT.""" @overload def __init__( @@ -12693,12 +10981,12 @@ class TextResponseFormatConfigurationResponseFormatText( ): # pylint: disable=name-too-long """Text. - :ivar type: The type of response format being defined. Always ``text``. Required. + :ivar type: The type of response format being defined. Always ``text``. Required. TEXT. :vartype type: str or ~azure.ai.projects.models.TEXT """ type: Literal[TextResponseFormatConfigurationType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of response format being defined. Always ``text``. Required.""" + """The type of response format being defined. Always ``text``. Required. TEXT.""" @overload def __init__( @@ -12721,12 +11009,13 @@ class TextResponseFormatJsonSchema(TextResponseFormatConfiguration, discriminato """JSON schema. :ivar type: The type of response format being defined. Always ``json_schema``. Required. + JSON_SCHEMA. :vartype type: str or ~azure.ai.projects.models.JSON_SCHEMA :ivar description: A description of what the response format is for, used by the model to - determine how to respond in the format. + determine how to respond in the format. :vartype description: str - :ivar name: The name of the response format. Must be a-z, A-Z, 0-9, or contain - underscores and dashes, with a maximum length of 64. Required. + :ivar name: The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores and + dashes, with a maximum length of 64. Required. :vartype name: str :ivar schema: Required. :vartype schema: dict[str, any] @@ -12735,13 +11024,13 @@ class TextResponseFormatJsonSchema(TextResponseFormatConfiguration, discriminato """ type: Literal[TextResponseFormatConfigurationType.JSON_SCHEMA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of response format being defined. Always ``json_schema``. Required.""" + """The type of response format being defined. Always ``json_schema``. Required. JSON_SCHEMA.""" description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of what the response format is for, used by the model to - determine how to respond in the format.""" + """A description of what the response format is for, used by the model to determine how to respond + in the format.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the response format. Must be a-z, A-Z, 0-9, or contain - underscores and dashes, with a maximum length of 64. Required.""" + """The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with + a maximum length of 64. Required.""" schema: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Required.""" strict: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -12768,6 +11057,336 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = TextResponseFormatConfigurationType.JSON_SCHEMA # type: ignore +class ToolChoiceAllowed(ToolChoiceParam, discriminator="allowed_tools"): + """Allowed tools. + + :ivar type: Allowed tool configuration type. Always ``allowed_tools``. Required. ALLOWED_TOOLS. + :vartype type: str or ~azure.ai.projects.models.ALLOWED_TOOLS + :ivar mode: Constrains the tools available to the model to a pre-defined set. ``auto`` allows + the model to pick from among the allowed tools and generate a message. ``required`` requires + the model to call one or more of the allowed tools. Required. Is either a Literal["auto"] type + or a Literal["required"] type. + :vartype mode: str or str + :ivar tools: A list of tool definitions that the model should be allowed to call. For the + Responses API, the list of tool definitions might look like: + .. code-block:: json + [ + { "type": "function", "name": "get_weather" }, + { "type": "mcp", "server_label": "deepwiki" }, + { "type": "image_generation" } + ]. Required. + :vartype tools: list[dict[str, any]] + """ + + type: Literal[ToolChoiceParamType.ALLOWED_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Allowed tool configuration type. Always ``allowed_tools``. Required. ALLOWED_TOOLS.""" + mode: Literal["auto", "required"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Constrains the tools available to the model to a pre-defined set. ``auto`` allows the model to + pick from among the allowed tools and generate a message. ``required`` requires the model to + call one or more of the allowed tools. Required. Is either a Literal[\"auto\"] type or a + Literal[\"required\"] type.""" + tools: list[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A list of tool definitions that the model should be allowed to call. For the Responses API, the + list of tool definitions might look like: + .. code-block:: json + [ + { "type": "function", "name": "get_weather" }, + { "type": "mcp", "server_label": "deepwiki" }, + { "type": "image_generation" } + ]. Required.""" + + + @overload + def __init__( + self, + *, + mode: Literal["auto", "required"], + tools: list[dict[str, Any]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.ALLOWED_TOOLS # type: ignore + + +class ToolChoiceCodeInterpreter(ToolChoiceParam, discriminator="code_interpreter"): + """Indicates that the model should use a built-in tool to generate a response. `Learn more about + built-in tools `_. + + :ivar type: Required. CODE_INTERPRETER. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER + """ + + type: Literal[ToolChoiceParamType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. CODE_INTERPRETER.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.CODE_INTERPRETER # type: ignore + + +class ToolChoiceComputerUsePreview(ToolChoiceParam, discriminator="computer_use_preview"): + """Indicates that the model should use a built-in tool to generate a response. `Learn more about + built-in tools `_. + + :ivar type: Required. COMPUTER_USE_PREVIEW. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_USE_PREVIEW + """ + + type: Literal[ToolChoiceParamType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. COMPUTER_USE_PREVIEW.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.COMPUTER_USE_PREVIEW # type: ignore + + +class ToolChoiceCustom(ToolChoiceParam, discriminator="custom"): + """Custom tool. + + :ivar type: For custom tool calling, the type is always ``custom``. Required. CUSTOM. + :vartype type: str or ~azure.ai.projects.models.CUSTOM + :ivar name: The name of the custom tool to call. Required. + :vartype name: str + """ + + type: Literal[ToolChoiceParamType.CUSTOM] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """For custom tool calling, the type is always ``custom``. Required. CUSTOM.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the custom tool to call. Required.""" + + @overload + def __init__( + self, + *, + name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.CUSTOM # type: ignore + + +class ToolChoiceFileSearch(ToolChoiceParam, discriminator="file_search"): + """Indicates that the model should use a built-in tool to generate a response. `Learn more about + built-in tools `_. + + :ivar type: Required. FILE_SEARCH. + :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH + """ + + type: Literal[ToolChoiceParamType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. FILE_SEARCH.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.FILE_SEARCH # type: ignore + + +class ToolChoiceFunction(ToolChoiceParam, discriminator="function"): + """Function tool. + + :ivar type: For function calling, the type is always ``function``. Required. FUNCTION. + :vartype type: str or ~azure.ai.projects.models.FUNCTION + :ivar name: The name of the function to call. Required. + :vartype name: str + """ + + type: Literal[ToolChoiceParamType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """For function calling, the type is always ``function``. Required. FUNCTION.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to call. Required.""" + + @overload + def __init__( + self, + *, + name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.FUNCTION # type: ignore + + +class ToolChoiceImageGeneration(ToolChoiceParam, discriminator="image_generation"): + """Indicates that the model should use a built-in tool to generate a response. `Learn more about + built-in tools `_. + + :ivar type: Required. IMAGE_GENERATION. + :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION + """ + + type: Literal[ToolChoiceParamType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. IMAGE_GENERATION.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.IMAGE_GENERATION # type: ignore + + +class ToolChoiceMCP(ToolChoiceParam, discriminator="mcp"): + """MCP tool. + + :ivar type: For MCP tools, the type is always ``mcp``. Required. MCP. + :vartype type: str or ~azure.ai.projects.models.MCP + :ivar server_label: The label of the MCP server to use. Required. + :vartype server_label: str + :ivar name: + :vartype name: str + """ + + type: Literal[ToolChoiceParamType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """For MCP tools, the type is always ``mcp``. Required. MCP.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server to use. Required.""" + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + + @overload + def __init__( + self, + *, + server_label: str, + name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.MCP # type: ignore + + +class ToolChoiceWebSearchPreview(ToolChoiceParam, discriminator="web_search_preview"): + """Indicates that the model should use a built-in tool to generate a response. `Learn more about + built-in tools `_. + + :ivar type: Required. WEB_SEARCH_PREVIEW. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_PREVIEW + """ + + type: Literal[ToolChoiceParamType.WEB_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. WEB_SEARCH_PREVIEW.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.WEB_SEARCH_PREVIEW # type: ignore + + +class ToolChoiceWebSearchPreview20250311(ToolChoiceParam, discriminator="web_search_preview_2025_03_11"): + """Indicates that the model should use a built-in tool to generate a response. `Learn more about + built-in tools `_. + + :ivar type: Required. WEB_SEARCH_PREVIEW2025_03_11. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_PREVIEW2025_03_11 + """ + + type: Literal[ToolChoiceParamType.WEB_SEARCH_PREVIEW2025_03_11] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. WEB_SEARCH_PREVIEW2025_03_11.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceParamType.WEB_SEARCH_PREVIEW2025_03_11 # type: ignore + + class ToolDescription(_Model): """Description of a tool that can be used by an agent. @@ -12871,16 +11490,16 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class Type(ComputerAction, discriminator="type"): """Type. - :ivar type: Specifies the event type. For a type action, this property is - always set to ``type``. Required. + :ivar type: Specifies the event type. For a type action, this property is always set to + ``type``. Required. TYPE. :vartype type: str or ~azure.ai.projects.models.TYPE :ivar text: The text to type. Required. :vartype text: str """ type: Literal[ComputerActionType.TYPE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a type action, this property is - always set to ``type``. Required.""" + """Specifies the event type. For a type action, this property is always set to ``type``. Required. + TYPE.""" text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The text to type. Required.""" @@ -12906,7 +11525,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class UrlCitationBody(Annotation, discriminator="url_citation"): """URL citation. - :ivar type: The type of the URL citation. Always ``url_citation``. Required. + :ivar type: The type of the URL citation. Always ``url_citation``. Required. URL_CITATION. :vartype type: str or ~azure.ai.projects.models.URL_CITATION :ivar url: The URL of the web resource. Required. :vartype url: str @@ -12920,7 +11539,7 @@ class UrlCitationBody(Annotation, discriminator="url_citation"): """ type: Literal[AnnotationType.URL_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the URL citation. Always ``url_citation``. Required.""" + """The type of the URL citation. Always ``url_citation``. Required. URL_CITATION.""" url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The URL of the web resource. Required.""" start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -12996,11 +11615,10 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class VectorStoreFileAttributes(_Model): - """Set of 16 key-value pairs that can be attached to an object. This can be - useful for storing additional information about the object in a structured - format, and querying for objects via API or the dashboard. Keys are strings - with a maximum length of 64 characters. Values are strings with a maximum - length of 512 characters, booleans, or numbers. + """Set of 16 key-value pairs that can be attached to an object. This can be useful for storing + additional information about the object in a structured format, and querying for objects via + API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are + strings with a maximum length of 512 characters, booleans, or numbers. """ @@ -13008,14 +11626,14 @@ class VectorStoreFileAttributes(_Model): class Wait(ComputerAction, discriminator="wait"): """Wait. - :ivar type: Specifies the event type. For a wait action, this property is - always set to ``wait``. Required. + :ivar type: Specifies the event type. For a wait action, this property is always set to + ``wait``. Required. WAIT. :vartype type: str or ~azure.ai.projects.models.WAIT """ type: Literal[ComputerActionType.WAIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a wait action, this property is - always set to ``wait``. Required.""" + """Specifies the event type. For a wait action, this property is always set to ``wait``. Required. + WAIT.""" @overload def __init__( @@ -13266,7 +11884,7 @@ class WebSearchPreviewTool(Tool, discriminator="web_search_preview"): """Web search preview. :ivar type: The type of the web search tool. One of ``web_search_preview`` or - ``web_search_preview_2025_03_11``. Required. + ``web_search_preview_2025_03_11``. Required. WEB_SEARCH_PREVIEW. :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_PREVIEW :ivar user_location: :vartype user_location: ~azure.ai.projects.models.ApproximateLocation @@ -13278,7 +11896,7 @@ class WebSearchPreviewTool(Tool, discriminator="web_search_preview"): type: Literal[ToolType.WEB_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore """The type of the web search tool. One of ``web_search_preview`` or - ``web_search_preview_2025_03_11``. Required.""" + ``web_search_preview_2025_03_11``. Required. WEB_SEARCH_PREVIEW.""" user_location: Optional["_models.ApproximateLocation"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -13313,7 +11931,7 @@ class WebSearchTool(Tool, discriminator="web_search"): """Web search. :ivar type: The type of the web search tool. One of ``web_search`` or - ``web_search_2025_08_26``. Required. + ``web_search_2025_08_26``. Required. WEB_SEARCH. :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH :ivar filters: :vartype filters: ~azure.ai.projects.models.WebSearchToolFilters @@ -13324,13 +11942,13 @@ class WebSearchTool(Tool, discriminator="web_search"): the following types: Literal["low"], Literal["medium"], Literal["high"] :vartype search_context_size: str or str or str :ivar custom_search_configuration: The project connections attached to this tool. There can be - a maximum of 1 connection - resource attached to the tool. + a maximum of 1 connection resource attached to the tool. :vartype custom_search_configuration: ~azure.ai.projects.models.WebSearchConfiguration """ type: Literal[ToolType.WEB_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the web search tool. One of ``web_search`` or ``web_search_2025_08_26``. Required.""" + """The type of the web search tool. One of ``web_search`` or ``web_search_2025_08_26``. Required. + WEB_SEARCH.""" filters: Optional["_models.WebSearchToolFilters"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -13346,8 +11964,8 @@ class WebSearchTool(Tool, discriminator="web_search"): custom_search_configuration: Optional["_models.WebSearchConfiguration"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The project connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool.""" + """The project connections attached to this tool. There can be a maximum of 1 connection resource + attached to the tool.""" @overload def __init__( @@ -13433,82 +12051,19 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = RecurrenceType.WEEKLY # type: ignore -class WorkflowActionOutputItemResource(ItemResource, discriminator="workflow_action"): - """WorkflowActionOutputItemResource. - - :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or - a str type. - :vartype created_by: ~azure.ai.projects.models.CreatedBy or str - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.WORKFLOW_ACTION - :ivar kind: The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required. - :vartype kind: str - :ivar action_id: Unique identifier for the action. Required. - :vartype action_id: str - :ivar parent_action_id: ID of the parent action if this is a nested action. - :vartype parent_action_id: str - :ivar previous_action_id: ID of the previous action if this action follows another. - :vartype previous_action_id: str - :ivar status: Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). - Required. Is one of the following types: Literal["completed"], Literal["failed"], - Literal["in_progress"], Literal["cancelled"] - :vartype status: str or str or str or str - """ - - type: Literal[ItemResourceType.WORKFLOW_ACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - kind: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required.""" - action_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique identifier for the action. Required.""" - parent_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """ID of the parent action if this is a nested action.""" - previous_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """ID of the previous action if this action follows another.""" - status: Literal["completed", "failed", "in_progress", "cancelled"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). Required. Is - one of the following types: Literal[\"completed\"], Literal[\"failed\"], - Literal[\"in_progress\"], Literal[\"cancelled\"]""" - - @overload - def __init__( - self, - *, - kind: str, - action_id: str, - status: Literal["completed", "failed", "in_progress", "cancelled"], - created_by: Optional[Union["_models.CreatedBy", str]] = None, - parent_action_id: Optional[str] = None, - previous_action_id: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemResourceType.WORKFLOW_ACTION # type: ignore - - class WorkflowAgentDefinition(AgentDefinition, discriminator="workflow"): """The workflow agent definition. :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. + :ivar kind: Required. WORKFLOW. :vartype kind: str or ~azure.ai.projects.models.WORKFLOW :ivar workflow: The CSDL YAML definition of the workflow. :vartype workflow: str """ kind: Literal[AgentKind.WORKFLOW] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" + """Required. WORKFLOW.""" workflow: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The CSDL YAML definition of the workflow.""" diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/__init__.py index 5ae1225f30fa..7c53165b9f1d 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/__init__.py @@ -13,16 +13,16 @@ from ._patch import * # pylint: disable=unused-wildcard-import from ._operations import AgentsOperations # type: ignore -from ._operations import MemoryStoresOperations # type: ignore from ._operations import ConnectionsOperations # type: ignore from ._operations import DatasetsOperations # type: ignore -from ._operations import IndexesOperations # type: ignore from ._operations import DeploymentsOperations # type: ignore -from ._operations import RedTeamsOperations # type: ignore -from ._operations import EvaluationRulesOperations # type: ignore from ._operations import EvaluationTaxonomiesOperations # type: ignore +from ._operations import EvaluationRulesOperations # type: ignore from ._operations import EvaluatorsOperations # type: ignore +from ._operations import IndexesOperations # type: ignore from ._operations import InsightsOperations # type: ignore +from ._operations import MemoryStoresOperations # type: ignore +from ._operations import RedTeamsOperations # type: ignore from ._operations import SchedulesOperations # type: ignore from ._patch import __all__ as _patch_all @@ -31,16 +31,16 @@ __all__ = [ "AgentsOperations", - "MemoryStoresOperations", "ConnectionsOperations", "DatasetsOperations", - "IndexesOperations", "DeploymentsOperations", - "RedTeamsOperations", - "EvaluationRulesOperations", "EvaluationTaxonomiesOperations", + "EvaluationRulesOperations", "EvaluatorsOperations", + "IndexesOperations", "InsightsOperations", + "MemoryStoresOperations", + "RedTeamsOperations", "SchedulesOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py index 3def1c5f7d71..ee4440d3a504 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -37,6 +37,7 @@ from .._configuration import AIProjectClientConfiguration from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize from .._utils.serialization import Deserializer, Serializer +from ..models._enums import FoundryFeaturesOptInKeys JSON = MutableMapping[str, Any] _Unset: Any = object() @@ -52,7 +53,7 @@ def build_agents_get_request(agent_name: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,12 +73,22 @@ def build_agents_get_request(agent_name: str, **kwargs: Any) -> HttpRequest: return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_agents_create_request(**kwargs: Any) -> HttpRequest: +def build_agents_create_request( + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -87,6 +98,8 @@ def build_agents_create_request(**kwargs: Any) -> HttpRequest: _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if foundry_features is not None: + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -94,12 +107,23 @@ def build_agents_create_request(**kwargs: Any) -> HttpRequest: return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_agents_update_request(agent_name: str, **kwargs: Any) -> HttpRequest: +def build_agents_update_request( + agent_name: str, + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -114,6 +138,8 @@ def build_agents_update_request(agent_name: str, **kwargs: Any) -> HttpRequest: _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if foundry_features is not None: + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -126,7 +152,7 @@ def build_agents_create_from_manifest_request(**kwargs: Any) -> HttpRequest: # _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -150,7 +176,7 @@ def build_agents_update_from_manifest_request( # pylint: disable=name-too-long _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -176,7 +202,7 @@ def build_agents_delete_request(agent_name: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -208,14 +234,13 @@ def build_agents_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = "/agents" # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if kind is not None: _params["kind"] = _SERIALIZER.query("kind", kind, "str") if limit is not None: @@ -226,6 +251,7 @@ def build_agents_list_request( _params["after"] = _SERIALIZER.query("after", after, "str") if before is not None: _params["before"] = _SERIALIZER.query("before", before, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -233,12 +259,23 @@ def build_agents_list_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_agents_create_version_request(agent_name: str, **kwargs: Any) -> HttpRequest: +def build_agents_create_version_request( + agent_name: str, + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -253,6 +290,8 @@ def build_agents_create_version_request(agent_name: str, **kwargs: Any) -> HttpR _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if foundry_features is not None: + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -267,7 +306,7 @@ def build_agents_create_version_from_manifest_request( # pylint: disable=name-t _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -293,7 +332,7 @@ def build_agents_get_version_request(agent_name: str, agent_version: str, **kwar _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -318,7 +357,7 @@ def build_agents_delete_version_request(agent_name: str, agent_version: str, **k _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -351,7 +390,7 @@ def build_agents_list_versions_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -363,7 +402,6 @@ def build_agents_list_versions_request( _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if limit is not None: _params["limit"] = _SERIALIZER.query("limit", limit, "int") if order is not None: @@ -372,6 +410,7 @@ def build_agents_list_versions_request( _params["after"] = _SERIALIZER.query("after", after, "str") if before is not None: _params["before"] = _SERIALIZER.query("before", before, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -379,97 +418,93 @@ def build_agents_list_versions_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_agents_stream_agent_container_logs_request( # pylint: disable=name-too-long - agent_name: str, - agent_version: str, - *, - kind: Optional[Union[str, _models.ContainerLogKind]] = None, - replica_name: Optional[str] = None, - tail: Optional[int] = None, - **kwargs: Any -) -> HttpRequest: +def build_connections_get_request(name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = "/agents/{agent_name}/versions/{agent_version}/containers/default:logstream" + _url = "/connections/{name}" path_format_arguments = { - "agent_name": _SERIALIZER.url("agent_name", agent_name, "str"), - "agent_version": _SERIALIZER.url("agent_version", agent_version, "str"), + "name": _SERIALIZER.url("name", name, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if kind is not None: - _params["kind"] = _SERIALIZER.query("kind", kind, "str") - if replica_name is not None: - _params["replica_name"] = _SERIALIZER.query("replica_name", replica_name, "str") - if tail is not None: - _params["tail"] = _SERIALIZER.query("tail", tail, "int") - return HttpRequest(method="POST", url=_url, params=_params, **kwargs) + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_memory_stores_create_request(**kwargs: Any) -> HttpRequest: +def build_connections_get_with_credentials_request( # pylint: disable=name-too-long + name: str, **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/memory_stores" + _url = "/connections/{name}/getConnectionWithCredentials" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_memory_stores_update_request(name: str, **kwargs: Any) -> HttpRequest: +def build_connections_list_request( + *, + connection_type: Optional[Union[str, _models.ConnectionType]] = None, + default_connection: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/memory_stores/{name}" - path_format_arguments = { - "name": _SERIALIZER.url("name", name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore + _url = "/connections" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if connection_type is not None: + _params["connectionType"] = _SERIALIZER.query("connection_type", connection_type, "str") + if default_connection is not None: + _params["defaultConnection"] = _SERIALIZER.query("default_connection", default_connection, "bool") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_memory_stores_get_request(name: str, **kwargs: Any) -> HttpRequest: +def build_datasets_list_versions_request(name: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/memory_stores/{name}" + _url = "/datasets/{name}/versions" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), } @@ -485,33 +520,18 @@ def build_memory_stores_get_request(name: str, **kwargs: Any) -> HttpRequest: return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_memory_stores_list_request( - *, - limit: Optional[int] = None, - order: Optional[Union[str, _models.PageOrder]] = None, - after: Optional[str] = None, - before: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: +def build_datasets_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/memory_stores" + _url = "/datasets" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if limit is not None: - _params["limit"] = _SERIALIZER.query("limit", limit, "int") - if order is not None: - _params["order"] = _SERIALIZER.query("order", order, "str") - if after is not None: - _params["after"] = _SERIALIZER.query("after", after, "str") - if before is not None: - _params["before"] = _SERIALIZER.query("before", before, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -519,17 +539,18 @@ def build_memory_stores_list_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_memory_stores_delete_request(name: str, **kwargs: Any) -> HttpRequest: +def build_datasets_get_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/memory_stores/{name}" + _url = "/datasets/{name}/versions/{version}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -540,23 +561,18 @@ def build_memory_stores_delete_request(name: str, **kwargs: Any) -> HttpRequest: # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_memory_stores_search_memories_request( # pylint: disable=name-too-long - name: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) +def build_datasets_delete_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) - accept = _headers.pop("Accept", "application/json") - + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) # Construct URL - _url = "/memory_stores/{name}:search_memories" + _url = "/datasets/{name}/versions/{version}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -564,28 +580,22 @@ def build_memory_stores_search_memories_request( # pylint: disable=name-too-lon # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) -def build_memory_stores_update_memories_request( # pylint: disable=name-too-long - name: str, **kwargs: Any -) -> HttpRequest: +def build_datasets_create_or_update_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/memory_stores/{name}:update_memories" + _url = "/datasets/{name}/versions/{version}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -598,21 +608,22 @@ def build_memory_stores_update_memories_request( # pylint: disable=name-too-lon _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) -def build_memory_stores_delete_scope_request(name: str, **kwargs: Any) -> HttpRequest: +def build_datasets_pending_upload_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/memory_stores/{name}:delete_scope" + _url = "/datasets/{name}/versions/{version}/startPendingUpload" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -628,17 +639,18 @@ def build_memory_stores_delete_scope_request(name: str, **kwargs: Any) -> HttpRe return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_connections_get_request(name: str, **kwargs: Any) -> HttpRequest: +def build_datasets_get_credentials_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/connections/{name}" + _url = "/datasets/{name}/versions/{version}/credentials" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -649,20 +661,18 @@ def build_connections_get_request(name: str, **kwargs: Any) -> HttpRequest: # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_connections_get_with_credentials_request( # pylint: disable=name-too-long - name: str, **kwargs: Any -) -> HttpRequest: +def build_deployments_get_request(name: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/connections/{name}/getConnectionWithCredentials" + _url = "/deployments/{name}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), } @@ -675,30 +685,33 @@ def build_connections_get_with_credentials_request( # pylint: disable=name-too- # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_connections_list_request( +def build_deployments_list_request( *, - connection_type: Optional[Union[str, _models.ConnectionType]] = None, - default_connection: Optional[bool] = None, + model_publisher: Optional[str] = None, + model_name: Optional[str] = None, + deployment_type: Optional[Union[str, _models.DeploymentType]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/connections" + _url = "/deployments" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if connection_type is not None: - _params["connectionType"] = _SERIALIZER.query("connection_type", connection_type, "str") - if default_connection is not None: - _params["defaultConnection"] = _SERIALIZER.query("default_connection", default_connection, "bool") + if model_publisher is not None: + _params["modelPublisher"] = _SERIALIZER.query("model_publisher", model_publisher, "str") + if model_name is not None: + _params["modelName"] = _SERIALIZER.query("model_name", model_name, "str") + if deployment_type is not None: + _params["deploymentType"] = _SERIALIZER.query("deployment_type", deployment_type, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -706,15 +719,15 @@ def build_connections_list_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_list_versions_request(name: str, **kwargs: Any) -> HttpRequest: +def build_evaluation_taxonomies_get_request(name: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/datasets/{name}/versions" + _url = "/evaluationtaxonomies/{name}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), } @@ -730,18 +743,24 @@ def build_datasets_list_versions_request(name: str, **kwargs: Any) -> HttpReques return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_list_request(**kwargs: Any) -> HttpRequest: +def build_evaluation_taxonomies_list_request( + *, input_name: Optional[str] = None, input_type: Optional[str] = None, **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/datasets" + _url = "/evaluationtaxonomies" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if input_name is not None: + _params["inputName"] = _SERIALIZER.query("input_name", input_name, "str") + if input_type is not None: + _params["inputType"] = _SERIALIZER.query("input_type", input_type, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -749,18 +768,17 @@ def build_datasets_list_request(**kwargs: Any) -> HttpRequest: return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_get_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_evaluation_taxonomies_delete_request( # pylint: disable=name-too-long + name: str, **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) - accept = _headers.pop("Accept", "application/json") - + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) # Construct URL - _url = "/datasets/{name}/versions/{version}" + _url = "/evaluationtaxonomies/{name}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), - "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -769,20 +787,27 @@ def build_datasets_get_request(name: str, version: str, **kwargs: Any) -> HttpRe _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_delete_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_evaluation_taxonomies_create_request( # pylint: disable=name-too-long + name: str, + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = "/datasets/{name}/versions/{version}" + _url = "/evaluationtaxonomies/{name}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), - "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -790,22 +815,33 @@ def build_datasets_delete_request(name: str, version: str, **kwargs: Any) -> Htt # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + # Construct headers + if foundry_features is not None: + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_create_or_update_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_evaluation_taxonomies_update_request( # pylint: disable=name-too-long + name: str, + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/datasets/{name}/versions/{version}" + _url = "/evaluationtaxonomies/{name}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), - "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -814,6 +850,8 @@ def build_datasets_create_or_update_request(name: str, version: str, **kwargs: A _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if foundry_features is not None: + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -821,19 +859,17 @@ def build_datasets_create_or_update_request(name: str, version: str, **kwargs: A return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_pending_upload_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_evaluation_rules_get_request(id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/datasets/{name}/versions/{version}/startPendingUpload" + _url = "/evaluationrules/{id}" path_format_arguments = { - "name": _SERIALIZER.url("name", name, "str"), - "version": _SERIALIZER.url("version", version, "str"), + "id": _SERIALIZER.url("id", id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -842,25 +878,20 @@ def build_datasets_pending_upload_request(name: str, version: str, **kwargs: Any _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_get_credentials_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_evaluation_rules_delete_request(id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) - accept = _headers.pop("Accept", "application/json") - + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) # Construct URL - _url = "/datasets/{name}/versions/{version}/credentials" + _url = "/evaluationrules/{id}" path_format_arguments = { - "name": _SERIALIZER.url("name", name, "str"), - "version": _SERIALIZER.url("version", version, "str"), + "id": _SERIALIZER.url("id", id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -869,22 +900,24 @@ def build_datasets_get_credentials_request(name: str, version: str, **kwargs: An _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) -def build_indexes_list_versions_request(name: str, **kwargs: Any) -> HttpRequest: +def build_evaluation_rules_create_or_update_request( # pylint: disable=name-too-long + id: str, **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/indexes/{name}/versions" + _url = "/evaluationrules/{id}" path_format_arguments = { - "name": _SERIALIZER.url("name", name, "str"), + "id": _SERIALIZER.url("id", id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -893,23 +926,37 @@ def build_indexes_list_versions_request(name: str, **kwargs: Any) -> HttpRequest _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_indexes_list_request(**kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) +def build_evaluation_rules_list_request( + *, + action_type: Optional[Union[str, _models.EvaluationRuleActionType]] = None, + agent_name: Optional[str] = None, + enabled: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/indexes" + _url = "/evaluationrules" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if action_type is not None: + _params["actionType"] = _SERIALIZER.query("action_type", action_type, "str") + if agent_name is not None: + _params["agentName"] = _SERIALIZER.query("agent_name", agent_name, "str") + if enabled is not None: + _params["enabled"] = _SERIALIZER.query("enabled", enabled, "bool") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -917,60 +964,87 @@ def build_indexes_list_request(**kwargs: Any) -> HttpRequest: return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_indexes_get_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_evaluators_list_versions_request( + name: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, + limit: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/indexes/{name}/versions/{version}" + _url = "/evaluators/{name}/versions" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), - "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if type is not None: + _params["type"] = _SERIALIZER.query("type", type, "str") + if limit is not None: + _params["limit"] = _SERIALIZER.query("limit", limit, "int") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_indexes_delete_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_evaluators_list_latest_versions_request( # pylint: disable=name-too-long + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, + limit: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) - # Construct URL - _url = "/indexes/{name}/versions/{version}" - path_format_arguments = { - "name": _SERIALIZER.url("name", name, "str"), - "version": _SERIALIZER.url("version", version, "str"), - } + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) + accept = _headers.pop("Accept", "application/json") - _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct URL + _url = "/evaluators" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if type is not None: + _params["type"] = _SERIALIZER.query("type", type, "str") + if limit is not None: + _params["limit"] = _SERIALIZER.query("limit", limit, "int") - return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_indexes_create_or_update_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_evaluators_get_version_request( + name: str, + version: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/indexes/{name}/versions/{version}" + _url = "/evaluators/{name}/versions/{version}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), "version": _SERIALIZER.url("version", version, "str"), @@ -982,24 +1056,28 @@ def build_indexes_create_or_update_request(name: str, version: str, **kwargs: An _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_deployments_get_request(name: str, **kwargs: Any) -> HttpRequest: +def build_evaluators_delete_version_request( + name: str, + version: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) - accept = _headers.pop("Accept", "application/json") - + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) # Construct URL - _url = "/deployments/{name}" + _url = "/evaluators/{name}/versions/{version}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -1008,53 +1086,60 @@ def build_deployments_get_request(name: str, **kwargs: Any) -> HttpRequest: _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) -def build_deployments_list_request( - *, - model_publisher: Optional[str] = None, - model_name: Optional[str] = None, - deployment_type: Optional[Union[str, _models.DeploymentType]] = None, - **kwargs: Any +def build_evaluators_create_version_request( + name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/deployments" + _url = "/evaluators/{name}/versions" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if model_publisher is not None: - _params["modelPublisher"] = _SERIALIZER.query("model_publisher", model_publisher, "str") - if model_name is not None: - _params["modelName"] = _SERIALIZER.query("model_name", model_name, "str") - if deployment_type is not None: - _params["deploymentType"] = _SERIALIZER.query("deployment_type", deployment_type, "str") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_red_teams_get_request(name: str, **kwargs: Any) -> HttpRequest: +def build_evaluators_update_version_request( + name: str, + version: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/redTeams/runs/{name}" + _url = "/evaluators/{name}/versions/{version}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -1063,20 +1148,28 @@ def build_red_teams_get_request(name: str, **kwargs: Any) -> HttpRequest: _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) -def build_red_teams_list_request(**kwargs: Any) -> HttpRequest: +def build_indexes_list_versions_request(name: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/redTeams/runs" + _url = "/indexes/{name}/versions" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -1087,39 +1180,37 @@ def build_red_teams_list_request(**kwargs: Any) -> HttpRequest: return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_red_teams_create_request(**kwargs: Any) -> HttpRequest: +def build_indexes_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/redTeams/runs:run" + _url = "/indexes" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluation_rules_get_request(id: str, **kwargs: Any) -> HttpRequest: +def build_indexes_get_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluationrules/{id}" + _url = "/indexes/{name}/versions/{version}" path_format_arguments = { - "id": _SERIALIZER.url("id", id, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -1133,15 +1224,15 @@ def build_evaluation_rules_get_request(id: str, **kwargs: Any) -> HttpRequest: return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluation_rules_delete_request(id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) +def build_indexes_delete_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) # Construct URL - _url = "/evaluationrules/{id}" + _url = "/indexes/{name}/versions/{version}" path_format_arguments = { - "id": _SERIALIZER.url("id", id, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -1149,25 +1240,22 @@ def build_evaluation_rules_delete_request(id: str, **kwargs: Any) -> HttpRequest # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - # Construct headers - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) -def build_evaluation_rules_create_or_update_request( # pylint: disable=name-too-long - id: str, **kwargs: Any -) -> HttpRequest: +def build_indexes_create_or_update_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluationrules/{id}" + _url = "/indexes/{name}/versions/{version}" path_format_arguments = { - "id": _SERIALIZER.url("id", id, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -1180,57 +1268,59 @@ def build_evaluation_rules_create_or_update_request( # pylint: disable=name-too _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluation_rules_list_request( - *, - action_type: Optional[Union[str, _models.EvaluationRuleActionType]] = None, - agent_name: Optional[str] = None, - enabled: Optional[bool] = None, - **kwargs: Any +def build_insights_generate_request( + *, foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluationrules" + _url = "/insights" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if action_type is not None: - _params["actionType"] = _SERIALIZER.query("action_type", action_type, "str") - if agent_name is not None: - _params["agentName"] = _SERIALIZER.query("agent_name", agent_name, "str") - if enabled is not None: - _params["enabled"] = _SERIALIZER.query("enabled", enabled, "bool") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") + if "Repeatability-Request-ID" not in _headers: + _headers["Repeatability-Request-ID"] = str(uuid.uuid4()) + if "Repeatability-First-Sent" not in _headers: + _headers["Repeatability-First-Sent"] = _SERIALIZER.serialize_data( + datetime.datetime.now(datetime.timezone.utc), "rfc-1123" + ) + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluation_taxonomies_get_request(name: str, **kwargs: Any) -> HttpRequest: +def build_insights_get_request(id: str, *, include_coordinates: Optional[bool] = None, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluationtaxonomies/{name}" + _url = "/insights/{id}" path_format_arguments = { - "name": _SERIALIZER.url("name", name, "str"), + "id": _SERIALIZER.url("id", id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if include_coordinates is not None: + _params["includeCoordinates"] = _SERIALIZER.query("include_coordinates", include_coordinates, "bool") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -1238,24 +1328,36 @@ def build_evaluation_taxonomies_get_request(name: str, **kwargs: Any) -> HttpReq return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluation_taxonomies_list_request( - *, input_name: Optional[str] = None, input_type: Optional[str] = None, **kwargs: Any +def build_insights_list_request( + *, + type: Optional[Union[str, _models.InsightType]] = None, + eval_id: Optional[str] = None, + run_id: Optional[str] = None, + agent_name: Optional[str] = None, + include_coordinates: Optional[bool] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluationtaxonomies" + _url = "/insights" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if input_name is not None: - _params["inputName"] = _SERIALIZER.query("input_name", input_name, "str") - if input_type is not None: - _params["inputType"] = _SERIALIZER.query("input_type", input_type, "str") + if type is not None: + _params["type"] = _SERIALIZER.query("type", type, "str") + if eval_id is not None: + _params["evalId"] = _SERIALIZER.query("eval_id", eval_id, "str") + if run_id is not None: + _params["runId"] = _SERIALIZER.query("run_id", run_id, "str") + if agent_name is not None: + _params["agentName"] = _SERIALIZER.query("agent_name", agent_name, "str") + if include_coordinates is not None: + _params["includeCoordinates"] = _SERIALIZER.query("include_coordinates", include_coordinates, "bool") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -1263,41 +1365,43 @@ def build_evaluation_taxonomies_list_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluation_taxonomies_delete_request( # pylint: disable=name-too-long - name: str, **kwargs: Any +def build_memory_stores_create_request( + *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) - # Construct URL - _url = "/evaluationtaxonomies/{name}" - path_format_arguments = { - "name": _SERIALIZER.url("name", name, "str"), - } + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) + accept = _headers.pop("Accept", "application/json") - _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct URL + _url = "/memory_stores" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluation_taxonomies_create_request( # pylint: disable=name-too-long - name: str, **kwargs: Any +def build_memory_stores_update_request( + name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluationtaxonomies/{name}" + _url = "/memory_stores/{name}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), } @@ -1308,25 +1412,25 @@ def build_evaluation_taxonomies_create_request( # pylint: disable=name-too-long _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluation_taxonomies_update_request( # pylint: disable=name-too-long - name: str, **kwargs: Any +def build_memory_stores_get_request( + name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluationtaxonomies/{name}" + _url = "/memory_stores/{name}" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), } @@ -1337,87 +1441,89 @@ def build_evaluation_taxonomies_update_request( # pylint: disable=name-too-long _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluators_list_versions_request( - name: str, +def build_memory_stores_list_request( *, - type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], limit: Optional[int] = None, + order: Optional[Union[str, _models.PageOrder]] = None, + after: Optional[str] = None, + before: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluators/{name}/versions" - path_format_arguments = { - "name": _SERIALIZER.url("name", name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore + _url = "/memory_stores" # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if type is not None: - _params["type"] = _SERIALIZER.query("type", type, "str") if limit is not None: _params["limit"] = _SERIALIZER.query("limit", limit, "int") + if order is not None: + _params["order"] = _SERIALIZER.query("order", order, "str") + if after is not None: + _params["after"] = _SERIALIZER.query("after", after, "str") + if before is not None: + _params["before"] = _SERIALIZER.query("before", before, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluators_list_latest_versions_request( # pylint: disable=name-too-long - *, - type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, - limit: Optional[int] = None, - **kwargs: Any +def build_memory_stores_delete_request( + name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluators" + _url = "/memory_stores/{name}" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if type is not None: - _params["type"] = _SERIALIZER.query("type", type, "str") - if limit is not None: - _params["limit"] = _SERIALIZER.query("limit", limit, "int") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluators_get_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_memory_stores_search_memories_request( # pylint: disable=name-too-long + name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluators/{name}/versions/{version}" + _url = "/memory_stores/{name}:search_memories" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), - "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -1426,40 +1532,26 @@ def build_evaluators_get_version_request(name: str, version: str, **kwargs: Any) _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_evaluators_delete_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) - # Construct URL - _url = "/evaluators/{name}/versions/{version}" - path_format_arguments = { - "name": _SERIALIZER.url("name", name, "str"), - "version": _SERIALIZER.url("version", version, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluators_create_version_request(name: str, **kwargs: Any) -> HttpRequest: +def build_memory_stores_update_memories_request( # pylint: disable=name-too-long + name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluators/{name}/versions" + _url = "/memory_stores/{name}:update_memories" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), } @@ -1470,6 +1562,7 @@ def build_evaluators_create_version_request(name: str, **kwargs: Any) -> HttpReq _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -1477,19 +1570,20 @@ def build_evaluators_create_version_request(name: str, **kwargs: Any) -> HttpReq return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_evaluators_update_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_memory_stores_delete_scope_request( + name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/evaluators/{name}/versions/{version}" + _url = "/memory_stores/{name}:delete_scope" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), - "version": _SERIALIZER.url("version", version, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -1498,60 +1592,50 @@ def build_evaluators_update_version_request(name: str, version: str, **kwargs: A _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_insights_generate_request(**kwargs: Any) -> HttpRequest: +def build_red_teams_get_request(name: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/insights" + _url = "/redTeams/runs/{name}" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if "Repeatability-Request-ID" not in _headers: - _headers["Repeatability-Request-ID"] = str(uuid.uuid4()) - if "Repeatability-First-Sent" not in _headers: - _headers["Repeatability-First-Sent"] = _SERIALIZER.serialize_data( - datetime.datetime.now(datetime.timezone.utc), "rfc-1123" - ) - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_insights_get_request(id: str, *, include_coordinates: Optional[bool] = None, **kwargs: Any) -> HttpRequest: +def build_red_teams_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/insights/{id}" - path_format_arguments = { - "id": _SERIALIZER.url("id", id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore + _url = "/redTeams/runs" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if include_coordinates is not None: - _params["includeCoordinates"] = _SERIALIZER.query("include_coordinates", include_coordinates, "bool") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -1559,48 +1643,37 @@ def build_insights_get_request(id: str, *, include_coordinates: Optional[bool] = return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_insights_list_request( - *, - type: Optional[Union[str, _models.InsightType]] = None, - eval_id: Optional[str] = None, - run_id: Optional[str] = None, - agent_name: Optional[str] = None, - include_coordinates: Optional[bool] = None, - **kwargs: Any +def build_red_teams_create_request( + *, foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.RED_TEAMS_V1_PREVIEW]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/insights" + _url = "/redTeams/runs:run" # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if type is not None: - _params["type"] = _SERIALIZER.query("type", type, "str") - if eval_id is not None: - _params["evalId"] = _SERIALIZER.query("eval_id", eval_id, "str") - if run_id is not None: - _params["runId"] = _SERIALIZER.query("run_id", run_id, "str") - if agent_name is not None: - _params["agentName"] = _SERIALIZER.query("agent_name", agent_name, "str") - if include_coordinates is not None: - _params["includeCoordinates"] = _SERIALIZER.query("include_coordinates", include_coordinates, "bool") # Construct headers + if foundry_features is not None: + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_schedules_delete_request(id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) # Construct URL _url = "/schedules/{id}" path_format_arguments = { @@ -1621,7 +1694,7 @@ def build_schedules_get_request(id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1645,7 +1718,7 @@ def build_schedules_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1665,7 +1738,7 @@ def build_schedules_create_or_update_request(id: str, **kwargs: Any) -> HttpRequ _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1687,18 +1760,24 @@ def build_schedules_create_or_update_request(id: str, **kwargs: Any) -> HttpRequ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_schedules_get_run_request(schedule_id: str, run_id: str, **kwargs: Any) -> HttpRequest: +def build_schedules_get_run_request( + schedule_id: str, + run_id: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + **kwargs: Any +) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/schedules/{scheduleId}/runs/{runId}" + _url = "/schedules/{schedule_id}/runs/{run_id}" path_format_arguments = { - "scheduleId": _SERIALIZER.url("schedule_id", schedule_id, "str"), - "runId": _SERIALIZER.url("run_id", run_id, "str"), + "schedule_id": _SERIALIZER.url("schedule_id", schedule_id, "str"), + "run_id": _SERIALIZER.url("run_id", run_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -1707,6 +1786,7 @@ def build_schedules_get_run_request(schedule_id: str, run_id: str, **kwargs: Any _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + _headers["Foundry-Features"] = _SERIALIZER.header("foundry_features", foundry_features, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) @@ -1716,7 +1796,7 @@ def build_schedules_list_runs_request(id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "v1")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1823,6 +1903,13 @@ def create( *, name: str, definition: _models.AgentDefinition, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, content_type: str = "application/json", metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, @@ -1840,6 +1927,14 @@ def create( :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -1858,11 +1953,32 @@ def create( """ @overload - def create(self, body: JSON, *, content_type: str = "application/json", **kwargs: Any) -> _models.AgentDetails: + def create( + self, + body: JSON, + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.AgentDetails: """Creates the agent. :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -1872,11 +1988,32 @@ def create(self, body: JSON, *, content_type: str = "application/json", **kwargs """ @overload - def create(self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any) -> _models.AgentDetails: + def create( + self, + body: IO[bytes], + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.AgentDetails: """Creates the agent. :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -1892,6 +2029,13 @@ def create( *, name: str = _Unset, definition: _models.AgentDefinition = _Unset, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, **kwargs: Any @@ -1910,6 +2054,14 @@ def create( :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. @@ -1952,6 +2104,7 @@ def create( _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore _request = build_agents_create_request( + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -1999,19 +2152,34 @@ def update( agent_name: str, *, definition: _models.AgentDefinition, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, content_type: str = "application/json", metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, **kwargs: Any ) -> _models.AgentDetails: - """Updates the agent by adding a new version if there are any changes to the agent definition. - If no changes, returns the existing agent version. + """Updates the agent by adding a new version if there are any changes to the agent definition. If + no changes, returns the existing agent version. :param agent_name: The name of the agent to retrieve. Required. :type agent_name: str :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -2031,15 +2199,35 @@ def update( @overload def update( - self, agent_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + self, + agent_name: str, + body: JSON, + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any ) -> _models.AgentDetails: - """Updates the agent by adding a new version if there are any changes to the agent definition. - If no changes, returns the existing agent version. + """Updates the agent by adding a new version if there are any changes to the agent definition. If + no changes, returns the existing agent version. :param agent_name: The name of the agent to retrieve. Required. :type agent_name: str :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -2050,15 +2238,35 @@ def update( @overload def update( - self, agent_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + self, + agent_name: str, + body: IO[bytes], + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any ) -> _models.AgentDetails: - """Updates the agent by adding a new version if there are any changes to the agent definition. - If no changes, returns the existing agent version. + """Updates the agent by adding a new version if there are any changes to the agent definition. If + no changes, returns the existing agent version. :param agent_name: The name of the agent to retrieve. Required. :type agent_name: str :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -2074,12 +2282,19 @@ def update( body: Union[JSON, IO[bytes]] = _Unset, *, definition: _models.AgentDefinition = _Unset, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, **kwargs: Any ) -> _models.AgentDetails: - """Updates the agent by adding a new version if there are any changes to the agent definition. - If no changes, returns the existing agent version. + """Updates the agent by adding a new version if there are any changes to the agent definition. If + no changes, returns the existing agent version. :param agent_name: The name of the agent to retrieve. Required. :type agent_name: str @@ -2088,6 +2303,14 @@ def update( :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. @@ -2129,6 +2352,7 @@ def update( _request = build_agents_update_request( agent_name=agent_name, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -2377,8 +2601,7 @@ def update_from_manifest( **kwargs: Any ) -> _models.AgentDetails: """Updates the agent from a manifest by adding a new version if there are any changes to the agent - definition. - If no changes, returns the existing agent version. + definition. If no changes, returns the existing agent version. :param agent_name: The name of the agent to update. Required. :type agent_name: str @@ -2409,8 +2632,7 @@ def update_from_manifest( self, agent_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any ) -> _models.AgentDetails: """Updates the agent from a manifest by adding a new version if there are any changes to the agent - definition. - If no changes, returns the existing agent version. + definition. If no changes, returns the existing agent version. :param agent_name: The name of the agent to update. Required. :type agent_name: str @@ -2429,8 +2651,7 @@ def update_from_manifest( self, agent_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> _models.AgentDetails: """Updates the agent from a manifest by adding a new version if there are any changes to the agent - definition. - If no changes, returns the existing agent version. + definition. If no changes, returns the existing agent version. :param agent_name: The name of the agent to update. Required. :type agent_name: str @@ -2457,8 +2678,7 @@ def update_from_manifest( **kwargs: Any ) -> _models.AgentDetails: """Updates the agent from a manifest by adding a new version if there are any changes to the agent - definition. - If no changes, returns the existing agent version. + definition. If no changes, returns the existing agent version. :param agent_name: The name of the agent to update. Required. :type agent_name: str @@ -2720,6 +2940,13 @@ def create_version( agent_name: str, *, definition: _models.AgentDefinition, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, content_type: str = "application/json", metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, @@ -2737,6 +2964,14 @@ def create_version( :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -2756,7 +2991,19 @@ def create_version( @overload def create_version( - self, agent_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + self, + agent_name: str, + body: JSON, + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any ) -> _models.AgentVersionDetails: """Create a new agent version. @@ -2769,6 +3016,14 @@ def create_version( :type agent_name: str :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -2779,7 +3034,19 @@ def create_version( @overload def create_version( - self, agent_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + self, + agent_name: str, + body: IO[bytes], + *, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, + content_type: str = "application/json", + **kwargs: Any ) -> _models.AgentVersionDetails: """Create a new agent version. @@ -2792,6 +3059,14 @@ def create_version( :type agent_name: str :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -2807,6 +3082,13 @@ def create_version( body: Union[JSON, IO[bytes]] = _Unset, *, definition: _models.AgentDefinition = _Unset, + foundry_features: Optional[ + Union[ + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW], + ] + ] = None, metadata: Optional[dict[str, str]] = None, description: Optional[str] = None, **kwargs: Any @@ -2825,6 +3107,14 @@ def create_version( :keyword definition: The agent definition. This can be a workflow, hosted agent, or a simple agent definition. Required. :paramtype definition: ~azure.ai.projects.models.AgentDefinition + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. Is one of the following types: + Literal[FoundryFeaturesOptInKeys.CONTAINER_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.HOSTED_AGENTS_V1_PREVIEW], + Literal[FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW] Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.CONTAINER_AGENTS_V1_PREVIEW or + str or ~azure.ai.projects.models.HOSTED_AGENTS_V1_PREVIEW or str or + ~azure.ai.projects.models.WORKFLOW_AGENTS_V1_PREVIEW :keyword metadata: Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. @@ -2866,6 +3156,7 @@ def create_version( _request = build_agents_create_version_request( agent_name=agent_name, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -3339,58 +3630,32 @@ def get_next(_continuation_token=None): return ItemPaged(get_next, extract_data) + +class ConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`connections` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace - def stream_agent_container_logs( # pylint: disable=inconsistent-return-statements - self, - agent_name: str, - agent_version: str, - *, - kind: Optional[Union[str, _models.ContainerLogKind]] = None, - replica_name: Optional[str] = None, - tail: Optional[int] = None, - **kwargs: Any - ) -> None: - """Container log entry streamed from the container as text chunks. - Each chunk is a UTF-8 string that may be either a plain text log line - or a JSON-formatted log entry, depending on the type of container log being streamed. - Clients should treat each chunk as opaque text and, if needed, attempt - to parse it as JSON based on their logging requirements. - - For system logs, the format is JSON with the following structure: - {"TimeStamp":"2025-12-15T16:51:33Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Connecting - to the events - collector...","Reason":"StartingGettingEvents","EventSource":"ContainerAppController","Count":1} - {"TimeStamp":"2025-12-15T16:51:34Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Successfully - connected to events - server","Reason":"ConnectedToEventsServer","EventSource":"ContainerAppController","Count":1} - - For console logs, the format is plain text as emitted by the container's stdout/stderr. - 2025-12-15T08:43:48.72656 Connecting to the container 'agent-container'... - 2025-12-15T08:43:48.75451 Successfully Connected to container: 'agent-container' [Revision: - 'je90fe655aa742ef9a188b9fd14d6764--7tca06b', Replica: - 'je90fe655aa742ef9a188b9fd14d6764--7tca06b-6898b9c89f-mpkjc'] - 2025-12-15T08:33:59.0671054Z stdout F INFO: 127.0.0.1:42588 - "GET /readiness HTTP/1.1" 200 - OK - 2025-12-15T08:34:29.0649033Z stdout F INFO: 127.0.0.1:60246 - "GET /readiness HTTP/1.1" 200 - OK - 2025-12-15T08:34:59.0644467Z stdout F INFO: 127.0.0.1:43994 - "GET /readiness HTTP/1.1" 200 - OK. - - :param agent_name: The name of the agent. Required. - :type agent_name: str - :param agent_version: The version of the agent. Required. - :type agent_version: str - :keyword kind: console returns container stdout/stderr, system returns container app event - stream. defaults to console. Known values are: "console" and "system". Default value is None. - :paramtype kind: str or ~azure.ai.projects.models.ContainerLogKind - :keyword replica_name: When omitted, the server chooses the first replica for console logs. - Required to target a specific replica. Default value is None. - :paramtype replica_name: str - :keyword tail: Number of trailing lines returned. Enforced to 1-300. Defaults to 20. Default - value is None. - :paramtype tail: int - :return: None - :rtype: None + def _get(self, name: str, **kwargs: Any) -> _models.Connection: + """Get a connection by name, without populating connection credentials. + + :param name: The friendly name of the connection, provided by the user. Required. + :type name: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Connection :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -3404,14 +3669,10 @@ def stream_agent_container_logs( # pylint: disable=inconsistent-return-statemen _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) - _request = build_agents_stream_agent_container_logs_request( - agent_name=agent_name, - agent_version=agent_version, - kind=kind, - replica_name=replica_name, - tail=tail, + _request = build_connections_get_request( + name=name, api_version=self._config.api_version, headers=_headers, params=_params, @@ -3421,7 +3682,7 @@ def stream_agent_container_logs( # pylint: disable=inconsistent-return-statemen } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3429,122 +3690,37 @@ def stream_agent_container_logs( # pylint: disable=inconsistent-return-statemen response = pipeline_response.http_response if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) - if cls: - return cls(pipeline_response, None, {}) # type: ignore + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Connection, response.json()) -class MemoryStoresOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore - Instead, you should access the following operations through - :class:`~azure.ai.projects.AIProjectClient`'s - :attr:`memory_stores` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - def create( - self, - *, - name: str, - definition: _models.MemoryStoreDefinition, - content_type: str = "application/json", - description: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, - **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Create a memory store. - - :keyword name: The name of the memory store. Required. - :paramtype name: str - :keyword definition: The memory store definition. Required. - :paramtype definition: ~azure.ai.projects.models.MemoryStoreDefinition - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword description: A human-readable description of the memory store. Default value is None. - :paramtype description: str - :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default - value is None. - :paramtype metadata: dict[str, str] - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create( - self, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Create a memory store. - - :param body: Required. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create( - self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Create a memory store. - - :param body: Required. - :type body: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ + return deserialized # type: ignore @distributed_trace - def create( - self, - body: Union[JSON, IO[bytes]] = _Unset, - *, - name: str = _Unset, - definition: _models.MemoryStoreDefinition = _Unset, - description: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, - **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Create a memory store. + def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: + """Get a connection by name, with its connection credentials. - :param body: Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword name: The name of the memory store. Required. - :paramtype name: str - :keyword definition: The memory store definition. Required. - :paramtype definition: ~azure.ai.projects.models.MemoryStoreDefinition - :keyword description: A human-readable description of the memory store. Default value is None. - :paramtype description: str - :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default - value is None. - :paramtype metadata: dict[str, str] - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails + :param name: The friendly name of the connection, provided by the user. Required. + :type name: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Connection :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -3555,30 +3731,14 @@ def create( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) - - if body is _Unset: - if name is _Unset: - raise TypeError("missing required argument: name") - if definition is _Unset: - raise TypeError("missing required argument: definition") - body = {"definition": definition, "description": description, "metadata": metadata, "name": name} - body = {k: v for k, v in body.items() if v is not None} - content_type = content_type or "application/json" - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) - _request = build_memory_stores_create_request( - content_type=content_type, + _request = build_connections_get_with_credentials_request( + name=name, api_version=self._config.api_version, - content=_content, headers=_headers, params=_params, ) @@ -3601,110 +3761,49 @@ def create( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) + deserialized = _deserialize(_models.Connection, response.json()) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - @overload - def update( + @distributed_trace + def list( self, - name: str, *, - content_type: str = "application/json", - description: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, + connection_type: Optional[Union[str, _models.ConnectionType]] = None, + default_connection: Optional[bool] = None, **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Update a memory store. - - :param name: The name of the memory store to update. Required. - :type name: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword description: A human-readable description of the memory store. Default value is None. - :paramtype description: str - :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default - value is None. - :paramtype metadata: dict[str, str] - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def update( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Update a memory store. - - :param name: The name of the memory store to update. Required. - :type name: str - :param body: Required. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def update( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Update a memory store. + ) -> ItemPaged["_models.Connection"]: + """List all connections in the project, without populating connection credentials. - :param name: The name of the memory store to update. Required. - :type name: str - :param body: Required. - :type body: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails + :keyword connection_type: List connections of this specific type. Known values are: + "AzureOpenAI", "AzureBlob", "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", + "AppConfig", "AppInsights", "CustomKeys", and "RemoteTool_Preview". Default value is None. + :paramtype connection_type: str or ~azure.ai.projects.models.ConnectionType + :keyword default_connection: List connections that are default connections. Default value is + None. + :paramtype default_connection: bool + :return: An iterator like instance of Connection + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.Connection] :raises ~azure.core.exceptions.HttpResponseError: """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - @distributed_trace - def update( - self, - name: str, - body: Union[JSON, IO[bytes]] = _Unset, - *, - description: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, - **kwargs: Any - ) -> _models.MemoryStoreDetails: - """Update a memory store. + cls: ClsType[List[_models.Connection]] = kwargs.pop("cls", None) - :param name: The name of the memory store to update. Required. - :type name: str - :param body: Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword description: A human-readable description of the memory store. Default value is None. - :paramtype description: str - :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default - value is None. - :paramtype metadata: dict[str, str] - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3713,162 +3812,101 @@ def update( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} + def prepare_request(next_link=None): + if not next_link: - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) + _request = build_connections_list_request( + connection_type=connection_type, + default_connection=default_connection, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - if body is _Unset: - body = {"description": description, "metadata": metadata} - body = {k: v for k, v in body.items() if v is not None} - content_type = content_type or "application/json" - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _request = build_memory_stores_update_request( - name=name, - content_type=content_type, - api_version=self._config.api_version, - content=_content, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + return _request - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.Connection], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) - response = pipeline_response.http_response + def get_next(next_link=None): + _request = prepare_request(next_link) - if response.status_code not in [200]: - if _stream: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs ) - raise HttpResponseError(response=response, model=error) - - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def get(self, name: str, **kwargs: Any) -> _models.MemoryStoreDetails: - """Retrieve a memory store. - - :param name: The name of the memory store to retrieve. Required. - :type name: str - :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDetails - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) + response = pipeline_response.http_response - _request = build_memory_stores_get_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) + return pipeline_response - response = pipeline_response.http_response + return ItemPaged(get_next, extract_data) - if response.status_code not in [200]: - if _stream: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) +class DatasetsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`datasets` attribute. + """ - return deserialized # type: ignore + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list( - self, - *, - limit: Optional[int] = None, - order: Optional[Union[str, _models.PageOrder]] = None, - before: Optional[str] = None, - **kwargs: Any - ) -> ItemPaged["_models.MemoryStoreDetails"]: - """List all memory stores. + def list_versions(self, name: str, **kwargs: Any) -> ItemPaged["_models.DatasetVersion"]: + """List all versions of the given DatasetVersion. - :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and - 100, and the - default is 20. Default value is None. - :paramtype limit: int - :keyword order: Sort order by the ``created_at`` timestamp of the objects. ``asc`` for - ascending order and``desc`` - for descending order. Known values are: "asc" and "desc". Default value is None. - :paramtype order: str or ~azure.ai.projects.models.PageOrder - :keyword before: A cursor for use in pagination. ``before`` is an object ID that defines your - place in the list. - For instance, if you make a list request and receive 100 objects, ending with obj_foo, your - subsequent call can include before=obj_foo in order to fetch the previous page of the list. - Default value is None. - :paramtype before: str - :return: An iterator like instance of MemoryStoreDetails - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.MemoryStoreDetails] + :param name: The name of the resource. Required. + :type name: str + :return: An iterator like instance of DatasetVersion + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.DatasetVersion] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.MemoryStoreDetails]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -3878,32 +3916,53 @@ def list( } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(_continuation_token=None): + def prepare_request(next_link=None): + if not next_link: + + _request = build_datasets_list_versions_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _request = build_memory_stores_list_request( - limit=limit, - order=order, - after=_continuation_token, - before=before, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) return _request def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.MemoryStoreDetails], deserialized.get("data", [])) + list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("last_id") or None, iter(list_of_elem) + return deserialized.get("nextLink") or None, iter(list_of_elem) - def get_next(_continuation_token=None): - _request = prepare_request(_continuation_token) + def get_next(next_link=None): + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access @@ -3913,26 +3972,25 @@ def get_next(_continuation_token=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) return pipeline_response return ItemPaged(get_next, extract_data) @distributed_trace - def delete(self, name: str, **kwargs: Any) -> _models.DeleteMemoryStoreResult: - """Delete a memory store. + def list(self, **kwargs: Any) -> ItemPaged["_models.DatasetVersion"]: + """List the latest version of each DatasetVersion. - :param name: The name of the memory store to delete. Required. - :type name: str - :return: DeleteMemoryStoreResult. The DeleteMemoryStoreResult is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DeleteMemoryStoreResult + :return: An iterator like instance of DatasetVersion + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.DatasetVersion] :raises ~azure.core.exceptions.HttpResponseError: """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3941,13 +3999,96 @@ def delete(self, name: str, **kwargs: Any) -> _models.DeleteMemoryStoreResult: } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} + def prepare_request(next_link=None): + if not next_link: + + _request = build_datasets_list_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVersion: + """Get the specific version of the DatasetVersion. The service returns 404 Not Found error if the + DatasetVersion does not exist. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to retrieve. Required. + :type version: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.DeleteMemoryStoreResult] = kwargs.pop("cls", None) + cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) - _request = build_memory_stores_delete_request( + _request = build_datasets_get_request( name=name, + version=version, api_version=self._config.api_version, headers=_headers, params=_params, @@ -3971,122 +4112,29 @@ def delete(self, name: str, **kwargs: Any) -> _models.DeleteMemoryStoreResult: except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.DeleteMemoryStoreResult, response.json()) + deserialized = _deserialize(_models.DatasetVersion, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - @overload - def search_memories( - self, - name: str, - *, - scope: str, - content_type: str = "application/json", - items: Optional[List[_models.InputItem]] = None, - previous_search_id: Optional[str] = None, - options: Optional[_models.MemorySearchOptions] = None, - **kwargs: Any - ) -> _models.MemoryStoreSearchResult: - """Search for relevant memories from a memory store based on conversation context. - - :param name: The name of the memory store to search. Required. - :type name: str - :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. - Required. - :paramtype scope: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword items: Items for which to search for relevant memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.InputItem] - :keyword previous_search_id: The unique ID of the previous search request, enabling incremental - memory search from where the last operation left off. Default value is None. - :paramtype previous_search_id: str - :keyword options: Memory search options. Default value is None. - :paramtype options: ~azure.ai.projects.models.MemorySearchOptions - :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def search_memories( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreSearchResult: - """Search for relevant memories from a memory store based on conversation context. - - :param name: The name of the memory store to search. Required. - :type name: str - :param body: Required. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def search_memories( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreSearchResult: - """Search for relevant memories from a memory store based on conversation context. - - :param name: The name of the memory store to search. Required. - :type name: str - :param body: Required. - :type body: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - @distributed_trace - def search_memories( - self, - name: str, - body: Union[JSON, IO[bytes]] = _Unset, - *, - scope: str = _Unset, - items: Optional[List[_models.InputItem]] = None, - previous_search_id: Optional[str] = None, - options: Optional[_models.MemorySearchOptions] = None, - **kwargs: Any - ) -> _models.MemoryStoreSearchResult: - """Search for relevant memories from a memory store based on conversation context. + def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements + """Delete the specific version of the DatasetVersion. The service returns 204 No Content if the + DatasetVersion was deleted successfully or if the DatasetVersion does not exist. - :param name: The name of the memory store to search. Required. + :param name: The name of the resource. Required. :type name: str - :param body: Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. - Required. - :paramtype scope: str - :keyword items: Items for which to search for relevant memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.InputItem] - :keyword previous_search_id: The unique ID of the previous search request, enabling incremental - memory search from where the last operation left off. Default value is None. - :paramtype previous_search_id: str - :keyword options: Memory search options. Default value is None. - :paramtype options: ~azure.ai.projects.models.MemorySearchOptions - :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult + :param version: The version of the DatasetVersion to delete. Required. + :type version: str + :return: None + :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4097,34 +4145,15 @@ def search_memories( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.MemoryStoreSearchResult] = kwargs.pop("cls", None) - - if body is _Unset: - if scope is _Unset: - raise TypeError("missing required argument: scope") - body = { - "items": items, - "options": options, - "previous_search_id": previous_search_id, - "scope": scope, - } - body = {k: v for k, v in body.items() if v is not None} - content_type = content_type or "application/json" - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_memory_stores_search_memories_request( + _request = build_datasets_delete_request( name=name, - content_type=content_type, + version=version, api_version=self._config.api_version, - content=_content, headers=_headers, params=_params, ) @@ -4133,47 +4162,115 @@ def search_memories( } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = kwargs.pop("stream", False) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [200]: - if _stream: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass + if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) - - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.MemoryStoreSearchResult, response.json()) + raise HttpResponseError(response=response) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore + return cls(pipeline_response, None, {}) # type: ignore - def _update_memories_initial( + @overload + def create_or_update( self, name: str, - body: Union[JSON, IO[bytes]] = _Unset, + version: str, + dataset_version: _models.DatasetVersion, *, - scope: str = _Unset, - items: Optional[List[_models.InputItem]] = None, - previous_update_id: Optional[str] = None, - update_delay: Optional[int] = None, + content_type: str = "application/merge-patch+json", **kwargs: Any - ) -> Iterator[bytes]: + ) -> _models.DatasetVersion: + """Create a new or update an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or update. Required. + :type version: str + :param dataset_version: The DatasetVersion to create or update. Required. + :type dataset_version: ~azure.ai.projects.models.DatasetVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + name: str, + version: str, + dataset_version: JSON, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or update an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or update. Required. + :type version: str + :param dataset_version: The DatasetVersion to create or update. Required. + :type dataset_version: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + name: str, + version: str, + dataset_version: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or update an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or update. Required. + :type version: str + :param dataset_version: The DatasetVersion to create or update. Required. + :type dataset_version: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, name: str, version: str, dataset_version: Union[_models.DatasetVersion, JSON, IO[bytes]], **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or update an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or update. Required. + :type version: str + :param dataset_version: The DatasetVersion to create or update. Is one of the following types: + DatasetVersion, JSON, IO[bytes] Required. + :type dataset_version: ~azure.ai.projects.models.DatasetVersion or JSON or IO[bytes] + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4186,27 +4283,18 @@ def _update_memories_initial( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) - if body is _Unset: - if scope is _Unset: - raise TypeError("missing required argument: scope") - body = { - "items": items, - "previous_update_id": previous_update_id, - "scope": scope, - "update_delay": update_delay, - } - body = {k: v for k, v in body.items() if v is not None} - content_type = content_type or "application/json" + content_type = content_type or "application/merge-patch+json" _content = None - if isinstance(body, (IOBase, bytes)): - _content = body + if isinstance(dataset_version, (IOBase, bytes)): + _content = dataset_version else: - _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(dataset_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_memory_stores_update_memories_request( + _request = build_datasets_create_or_update_request( name=name, + version=version, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -4218,228 +4306,130 @@ def _update_memories_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = True + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [202]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + raise HttpResponseError(response=response) - deserialized = response.iter_bytes() + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.DatasetVersion, response.json()) if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore @overload - def _begin_update_memories( + def pending_upload( self, name: str, + version: str, + pending_upload_request: _models.PendingUploadRequest, *, - scope: str, content_type: str = "application/json", - items: Optional[List[_models.InputItem]] = None, - previous_update_id: Optional[str] = None, - update_delay: Optional[int] = None, **kwargs: Any - ) -> LROPoller[_models.MemoryStoreUpdateCompletedResult]: ... - @overload - def _begin_update_memories( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.MemoryStoreUpdateCompletedResult]: ... - @overload - def _begin_update_memories( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.MemoryStoreUpdateCompletedResult]: ... + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. - @distributed_trace - def _begin_update_memories( + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param pending_upload_request: The pending upload request parameters. Required. + :type pending_upload_request: ~azure.ai.projects.models.PendingUploadRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def pending_upload( self, name: str, - body: Union[JSON, IO[bytes]] = _Unset, + version: str, + pending_upload_request: JSON, *, - scope: str = _Unset, - items: Optional[List[_models.InputItem]] = None, - previous_update_id: Optional[str] = None, - update_delay: Optional[int] = None, + content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.MemoryStoreUpdateCompletedResult]: - """Update memory store with conversation memories. + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. - :param name: The name of the memory store to update. Required. + :param name: The name of the resource. Required. :type name: str - :param body: Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. - Required. - :paramtype scope: str - :keyword items: Conversation items from which to extract memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.InputItem] - :keyword previous_update_id: The unique ID of the previous update request, enabling incremental - memory updates from where the last operation left off. Default value is None. - :paramtype previous_update_id: str - :keyword update_delay: Timeout period before processing the memory update in seconds. - If a new update request is received during this period, it will cancel the current request and - reset the timeout. - Set to 0 to immediately trigger the update without delay. - Defaults to 300 (5 minutes). Default value is None. - :paramtype update_delay: int - :return: An instance of LROPoller that returns MemoryStoreUpdateCompletedResult. The - MemoryStoreUpdateCompletedResult is compatible with MutableMapping - :rtype: - ~azure.core.polling.LROPoller[~azure.ai.projects.models.MemoryStoreUpdateCompletedResult] + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param pending_upload_request: The pending upload request parameters. Required. + :type pending_upload_request: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.PendingUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.MemoryStoreUpdateCompletedResult] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._update_memories_initial( - name=name, - body=body, - scope=scope, - items=items, - previous_update_id=previous_update_id, - update_delay=update_delay, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) + @overload + def pending_upload( + self, + name: str, + version: str, + pending_upload_request: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.MemoryStoreUpdateCompletedResult, response.json().get("result", {})) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[_models.MemoryStoreUpdateCompletedResult].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[_models.MemoryStoreUpdateCompletedResult]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @overload - def delete_scope( - self, name: str, *, scope: str, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDeleteScopeResult: - """Delete all memories associated with a specific scope from a memory store. - - :param name: The name of the memory store. Required. - :type name: str - :keyword scope: The namespace that logically groups and isolates memories to delete, such as a - user ID. Required. - :paramtype scope: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with - MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def delete_scope( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDeleteScopeResult: - """Delete all memories associated with a specific scope from a memory store. - - :param name: The name of the memory store. Required. - :type name: str - :param body: Required. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with - MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def delete_scope( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.MemoryStoreDeleteScopeResult: - """Delete all memories associated with a specific scope from a memory store. - - :param name: The name of the memory store. Required. + :param name: The name of the resource. Required. :type name: str - :param body: Required. - :type body: IO[bytes] + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param pending_upload_request: The pending upload request parameters. Required. + :type pending_upload_request: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with - MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.PendingUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace - def delete_scope( - self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, **kwargs: Any - ) -> _models.MemoryStoreDeleteScopeResult: - """Delete all memories associated with a specific scope from a memory store. + def pending_upload( + self, + name: str, + version: str, + pending_upload_request: Union[_models.PendingUploadRequest, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. - :param name: The name of the memory store. Required. + :param name: The name of the resource. Required. :type name: str - :param body: Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword scope: The namespace that logically groups and isolates memories to delete, such as a - user ID. Required. - :paramtype scope: str - :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with - MutableMapping - :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param pending_upload_request: The pending upload request parameters. Is one of the following + types: PendingUploadRequest, JSON, IO[bytes] Required. + :type pending_upload_request: ~azure.ai.projects.models.PendingUploadRequest or JSON or + IO[bytes] + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.PendingUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4454,22 +4444,18 @@ def delete_scope( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.MemoryStoreDeleteScopeResult] = kwargs.pop("cls", None) + cls: ClsType[_models.PendingUploadResponse] = kwargs.pop("cls", None) - if body is _Unset: - if scope is _Unset: - raise TypeError("missing required argument: scope") - body = {"scope": scope} - body = {k: v for k, v in body.items() if v is not None} content_type = content_type or "application/json" _content = None - if isinstance(body, (IOBase, bytes)): - _content = body + if isinstance(pending_upload_request, (IOBase, bytes)): + _content = pending_upload_request else: - _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(pending_upload_request, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_memory_stores_delete_scope_request( + _request = build_datasets_pending_upload_request( name=name, + version=version, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -4495,48 +4481,28 @@ def delete_scope( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize( - _models.ApiErrorResponse, - response, - ) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.MemoryStoreDeleteScopeResult, response.json()) + deserialized = _deserialize(_models.PendingUploadResponse, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - -class ConnectionsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.AIProjectClient`'s - :attr:`connections` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def _get(self, name: str, **kwargs: Any) -> _models.Connection: - """Get a connection by name, without populating connection credentials. + def get_credentials(self, name: str, version: str, **kwargs: Any) -> _models.DatasetCredential: + """Get the SAS credential to access the storage account associated with a Dataset version. - :param name: The friendly name of the connection, provided by the user. Required. + :param name: The name of the resource. Required. :type name: str - :return: Connection. The Connection is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Connection + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :return: DatasetCredential. The DatasetCredential is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DatasetCredential :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4550,10 +4516,11 @@ def _get(self, name: str, **kwargs: Any) -> _models.Connection: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.Connection] = kwargs.pop("cls", None) + cls: ClsType[_models.DatasetCredential] = kwargs.pop("cls", None) - _request = build_connections_get_request( + _request = build_datasets_get_credentials_request( name=name, + version=version, api_version=self._config.api_version, headers=_headers, params=_params, @@ -4579,29 +4546,42 @@ def _get(self, name: str, **kwargs: Any) -> _models.Connection: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Connection, response.json()) + deserialized = _deserialize(_models.DatasetCredential, response.json()) if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + +class DeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`deployments` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace - def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: - """Get a connection by name, with its connection credentials. + def get(self, name: str, **kwargs: Any) -> _models.Deployment: + """Get a deployed model. - :param name: The friendly name of the connection, provided by the user. Required. + :param name: Name of the deployment. Required. :type name: str - :return: Connection. The Connection is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Connection + :return: Deployment. The Deployment is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Deployment :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4615,9 +4595,9 @@ def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.Connection] = kwargs.pop("cls", None) + cls: ClsType[_models.Deployment] = kwargs.pop("cls", None) - _request = build_connections_get_with_credentials_request( + _request = build_deployments_get_request( name=name, api_version=self._config.api_version, headers=_headers, @@ -4652,7 +4632,7 @@ def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Connection, response.json()) + deserialized = _deserialize(_models.Deployment, response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -4663,27 +4643,29 @@ def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: def list( self, *, - connection_type: Optional[Union[str, _models.ConnectionType]] = None, - default_connection: Optional[bool] = None, + model_publisher: Optional[str] = None, + model_name: Optional[str] = None, + deployment_type: Optional[Union[str, _models.DeploymentType]] = None, **kwargs: Any - ) -> ItemPaged["_models.Connection"]: - """List all connections in the project, without populating connection credentials. + ) -> ItemPaged["_models.Deployment"]: + """List all deployed models in the project. - :keyword connection_type: List connections of this specific type. Known values are: - "AzureOpenAI", "AzureBlob", "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", - "AppConfig", "AppInsights", "CustomKeys", and "RemoteTool". Default value is None. - :paramtype connection_type: str or ~azure.ai.projects.models.ConnectionType - :keyword default_connection: List connections that are default connections. Default value is - None. - :paramtype default_connection: bool - :return: An iterator like instance of Connection - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.Connection] + :keyword model_publisher: Model publisher to filter models by. Default value is None. + :paramtype model_publisher: str + :keyword model_name: Model name (the publisher specific name) to filter models by. Default + value is None. + :paramtype model_name: str + :keyword deployment_type: Type of deployment to filter list by. "ModelDeployment" Default value + is None. + :paramtype deployment_type: str or ~azure.ai.projects.models.DeploymentType + :return: An iterator like instance of Deployment + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.Deployment] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.Connection]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.Deployment]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -4696,12 +4678,13 @@ def list( def prepare_request(next_link=None): if not next_link: - _request = build_connections_list_request( - connection_type=connection_type, - default_connection=default_connection, - api_version=self._config.api_version, - headers=_headers, - params=_params, + _request = build_deployments_list_request( + model_publisher=model_publisher, + model_name=model_name, + deployment_type=deployment_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, ) path_format_arguments = { "endpoint": self._serialize.url( @@ -4734,7 +4717,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Connection], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.Deployment], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -4757,14 +4740,14 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class DatasetsOperations: +class EvaluationTaxonomiesOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.ai.projects.AIProjectClient`'s - :attr:`datasets` attribute. + :attr:`evaluation_taxonomies` attribute. """ def __init__(self, *args, **kwargs) -> None: @@ -4775,20 +4758,15 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list_versions(self, name: str, **kwargs: Any) -> ItemPaged["_models.DatasetVersion"]: - """List all versions of the given DatasetVersion. + def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: + """Get an evaluation run by name. :param name: The name of the resource. Required. :type name: str - :return: An iterator like instance of DatasetVersion - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.DatasetVersion] + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) - error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4797,80 +4775,71 @@ def list_versions(self, name: str, **kwargs: Any) -> ItemPaged["_models.DatasetV } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - _request = build_datasets_list_versions_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + _request = build_evaluation_taxonomies_get_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - return _request + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) + response = pipeline_response.http_response - def get_next(next_link=None): - _request = prepare_request(next_link) + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) - return pipeline_response + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return ItemPaged(get_next, extract_data) + return deserialized # type: ignore @distributed_trace - def list(self, **kwargs: Any) -> ItemPaged["_models.DatasetVersion"]: - """List the latest version of each DatasetVersion. + def list( + self, *, input_name: Optional[str] = None, input_type: Optional[str] = None, **kwargs: Any + ) -> ItemPaged["_models.EvaluationTaxonomy"]: + """List evaluation taxonomies. - :return: An iterator like instance of DatasetVersion - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.DatasetVersion] + :keyword input_name: Filter by the evaluation input name. Default value is None. + :paramtype input_name: str + :keyword input_type: Filter by taxonomy input type. Default value is None. + :paramtype input_type: str + :return: An iterator like instance of EvaluationTaxonomy + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.EvaluationTaxonomy] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.EvaluationTaxonomy]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -4883,7 +4852,9 @@ def list(self, **kwargs: Any) -> ItemPaged["_models.DatasetVersion"]: def prepare_request(next_link=None): if not next_link: - _request = build_datasets_list_request( + _request = build_evaluation_taxonomies_list_request( + input_name=input_name, + input_type=input_type, api_version=self._config.api_version, headers=_headers, params=_params, @@ -4919,7 +4890,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.EvaluationTaxonomy], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -4942,78 +4913,11 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - def get(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVersion: - """Get the specific version of the DatasetVersion. The service returns 404 Not Found error if the - DatasetVersion does not exist. - - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to retrieve. Required. - :type version: str - :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetVersion - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) - - _request = build_datasets_get_request( - name=name, - version=version, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - if _stream: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.DatasetVersion, response.json()) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements - """Delete the specific version of the DatasetVersion. The service returns 204 No Content if the - DatasetVersion was deleted successfully or if the DatasetVersion does not exist. + def delete(self, name: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements + """Delete an evaluation taxonomy by name. :param name: The name of the resource. Required. :type name: str - :param version: The version of the DatasetVersion to delete. Required. - :type version: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -5031,9 +4935,8 @@ def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: dis cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_datasets_delete_request( + _request = build_evaluation_taxonomies_delete_request( name=name, - version=version, api_version=self._config.api_version, headers=_headers, params=_params, @@ -5054,102 +4957,116 @@ def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: dis map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore @overload - def create_or_update( + def create( self, name: str, - version: str, - dataset_version: _models.DatasetVersion, + body: _models.EvaluationTaxonomy, *, - content_type: str = "application/merge-patch+json", + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + content_type: str = "application/json", **kwargs: Any - ) -> _models.DatasetVersion: - """Create a new or update an existing DatasetVersion with the given version id. + ) -> _models.EvaluationTaxonomy: + """Create an evaluation taxonomy. - :param name: The name of the resource. Required. + :param name: The name of the evaluation taxonomy. Required. :type name: str - :param version: The specific version id of the DatasetVersion to create or update. Required. - :type version: str - :param dataset_version: The DatasetVersion to create or update. Required. - :type dataset_version: ~azure.ai.projects.models.DatasetVersion + :param body: The evaluation taxonomy. Required. + :type body: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/merge-patch+json". + Default value is "application/json". :paramtype content_type: str - :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetVersion + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def create_or_update( + def create( self, name: str, - version: str, - dataset_version: JSON, + body: JSON, *, - content_type: str = "application/merge-patch+json", + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + content_type: str = "application/json", **kwargs: Any - ) -> _models.DatasetVersion: - """Create a new or update an existing DatasetVersion with the given version id. + ) -> _models.EvaluationTaxonomy: + """Create an evaluation taxonomy. - :param name: The name of the resource. Required. + :param name: The name of the evaluation taxonomy. Required. :type name: str - :param version: The specific version id of the DatasetVersion to create or update. Required. - :type version: str - :param dataset_version: The DatasetVersion to create or update. Required. - :type dataset_version: JSON + :param body: The evaluation taxonomy. Required. + :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/merge-patch+json". + Default value is "application/json". :paramtype content_type: str - :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetVersion + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def create_or_update( + def create( self, name: str, - version: str, - dataset_version: IO[bytes], + body: IO[bytes], *, - content_type: str = "application/merge-patch+json", + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + content_type: str = "application/json", **kwargs: Any - ) -> _models.DatasetVersion: - """Create a new or update an existing DatasetVersion with the given version id. + ) -> _models.EvaluationTaxonomy: + """Create an evaluation taxonomy. - :param name: The name of the resource. Required. + :param name: The name of the evaluation taxonomy. Required. :type name: str - :param version: The specific version id of the DatasetVersion to create or update. Required. - :type version: str - :param dataset_version: The DatasetVersion to create or update. Required. - :type dataset_version: IO[bytes] + :param body: The evaluation taxonomy. Required. + :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/merge-patch+json". + Default value is "application/json". :paramtype content_type: str - :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetVersion + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace - def create_or_update( - self, name: str, version: str, dataset_version: Union[_models.DatasetVersion, JSON, IO[bytes]], **kwargs: Any - ) -> _models.DatasetVersion: - """Create a new or update an existing DatasetVersion with the given version id. + def create( + self, + name: str, + body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, + **kwargs: Any + ) -> _models.EvaluationTaxonomy: + """Create an evaluation taxonomy. - :param name: The name of the resource. Required. + :param name: The name of the evaluation taxonomy. Required. :type name: str - :param version: The specific version id of the DatasetVersion to create or update. Required. - :type version: str - :param dataset_version: The DatasetVersion to create or update. Is one of the following types: - DatasetVersion, JSON, IO[bytes] Required. - :type dataset_version: ~azure.ai.projects.models.DatasetVersion or JSON or IO[bytes] - :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetVersion + :param body: The evaluation taxonomy. Is one of the following types: EvaluationTaxonomy, JSON, + IO[bytes] Required. + :type body: ~azure.ai.projects.models.EvaluationTaxonomy or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5164,18 +5081,18 @@ def create_or_update( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) + cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) - content_type = content_type or "application/merge-patch+json" + content_type = content_type or "application/json" _content = None - if isinstance(dataset_version, (IOBase, bytes)): - _content = dataset_version + if isinstance(body, (IOBase, bytes)): + _content = body else: - _content = json.dumps(dataset_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_datasets_create_or_update_request( + _request = build_evaluation_taxonomies_create_request( name=name, - version=version, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -5206,7 +5123,7 @@ def create_or_update( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.DatasetVersion, response.json()) + deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -5214,103 +5131,107 @@ def create_or_update( return deserialized # type: ignore @overload - def pending_upload( + def update( self, name: str, - version: str, - pending_upload_request: _models.PendingUploadRequest, + body: _models.EvaluationTaxonomy, *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, content_type: str = "application/json", **kwargs: Any - ) -> _models.PendingUploadResponse: - """Start a new or get an existing pending upload of a dataset for a specific version. + ) -> _models.EvaluationTaxonomy: + """Update an evaluation taxonomy. - :param name: The name of the resource. Required. + :param name: The name of the evaluation taxonomy. Required. :type name: str - :param version: The specific version id of the DatasetVersion to operate on. Required. - :type version: str - :param pending_upload_request: The pending upload request parameters. Required. - :type pending_upload_request: ~azure.ai.projects.models.PendingUploadRequest + :param body: The evaluation taxonomy. Required. + :type body: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.PendingUploadResponse + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def pending_upload( + def update( self, name: str, - version: str, - pending_upload_request: JSON, + body: JSON, *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, content_type: str = "application/json", **kwargs: Any - ) -> _models.PendingUploadResponse: - """Start a new or get an existing pending upload of a dataset for a specific version. + ) -> _models.EvaluationTaxonomy: + """Update an evaluation taxonomy. - :param name: The name of the resource. Required. + :param name: The name of the evaluation taxonomy. Required. :type name: str - :param version: The specific version id of the DatasetVersion to operate on. Required. - :type version: str - :param pending_upload_request: The pending upload request parameters. Required. - :type pending_upload_request: JSON + :param body: The evaluation taxonomy. Required. + :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.PendingUploadResponse + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def pending_upload( + def update( self, name: str, - version: str, - pending_upload_request: IO[bytes], + body: IO[bytes], *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, content_type: str = "application/json", **kwargs: Any - ) -> _models.PendingUploadResponse: - """Start a new or get an existing pending upload of a dataset for a specific version. + ) -> _models.EvaluationTaxonomy: + """Update an evaluation taxonomy. - :param name: The name of the resource. Required. + :param name: The name of the evaluation taxonomy. Required. :type name: str - :param version: The specific version id of the DatasetVersion to operate on. Required. - :type version: str - :param pending_upload_request: The pending upload request parameters. Required. - :type pending_upload_request: IO[bytes] + :param body: The evaluation taxonomy. Required. + :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.PendingUploadResponse + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace - def pending_upload( + def update( self, name: str, - version: str, - pending_upload_request: Union[_models.PendingUploadRequest, JSON, IO[bytes]], + body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW]] = None, **kwargs: Any - ) -> _models.PendingUploadResponse: - """Start a new or get an existing pending upload of a dataset for a specific version. + ) -> _models.EvaluationTaxonomy: + """Update an evaluation taxonomy. - :param name: The name of the resource. Required. + :param name: The name of the evaluation taxonomy. Required. :type name: str - :param version: The specific version id of the DatasetVersion to operate on. Required. - :type version: str - :param pending_upload_request: The pending upload request parameters. Is one of the following - types: PendingUploadRequest, JSON, IO[bytes] Required. - :type pending_upload_request: ~azure.ai.projects.models.PendingUploadRequest or JSON or - IO[bytes] - :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.PendingUploadResponse + :param body: The evaluation taxonomy. Is one of the following types: EvaluationTaxonomy, JSON, + IO[bytes] Required. + :type body: ~azure.ai.projects.models.EvaluationTaxonomy or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationTaxonomy :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5325,18 +5246,18 @@ def pending_upload( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.PendingUploadResponse] = kwargs.pop("cls", None) + cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) content_type = content_type or "application/json" _content = None - if isinstance(pending_upload_request, (IOBase, bytes)): - _content = pending_upload_request + if isinstance(body, (IOBase, bytes)): + _content = body else: - _content = json.dumps(pending_upload_request, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_datasets_pending_upload_request( + _request = build_evaluation_taxonomies_update_request( name=name, - version=version, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -5367,23 +5288,39 @@ def pending_upload( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.PendingUploadResponse, response.json()) + deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + +class EvaluationRulesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`evaluation_rules` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace - def get_credentials(self, name: str, version: str, **kwargs: Any) -> _models.DatasetCredential: - """Get the SAS credential to access the storage account associated with a Dataset version. + def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: + """Get an evaluation rule. - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the DatasetVersion to operate on. Required. - :type version: str - :return: DatasetCredential. The DatasetCredential is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.DatasetCredential + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationRule :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5397,11 +5334,10 @@ def get_credentials(self, name: str, version: str, **kwargs: Any) -> _models.Dat _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.DatasetCredential] = kwargs.pop("cls", None) + cls: ClsType[_models.EvaluationRule] = kwargs.pop("cls", None) - _request = build_datasets_get_credentials_request( - name=name, - version=version, + _request = build_evaluation_rules_get_request( + id=id, api_version=self._config.api_version, headers=_headers, params=_params, @@ -5427,49 +5363,31 @@ def get_credentials(self, name: str, version: str, **kwargs: Any) -> _models.Dat map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.DatasetCredential, response.json()) + deserialized = _deserialize(_models.EvaluationRule, response.json()) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - -class IndexesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.AIProjectClient`'s - :attr:`indexes` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def list_versions(self, name: str, **kwargs: Any) -> ItemPaged["_models.Index"]: - """List all versions of the given Index. + def delete(self, id: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements + """Delete an evaluation rule. - :param name: The name of the resource. Required. - :type name: str - :return: An iterator like instance of Index - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.Index] + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :return: None + :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) - error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -5478,80 +5396,196 @@ def list_versions(self, name: str, **kwargs: Any) -> ItemPaged["_models.Index"]: } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - _request = build_indexes_list_versions_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + cls: ClsType[None] = kwargs.pop("cls", None) - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + _request = build_evaluation_rules_delete_request( + id=id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - return _request + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Index], deserialized.get("value", [])) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) + response = pipeline_response.http_response - def get_next(next_link=None): - _request = prepare_request(next_link) + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore - return pipeline_response + @overload + def create_or_update( + self, id: str, evaluation_rule: _models.EvaluationRule, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.EvaluationRule: + """Create or update an evaluation rule. - return ItemPaged(get_next, extract_data) + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :param evaluation_rule: Evaluation rule resource. Required. + :type evaluation_rule: ~azure.ai.projects.models.EvaluationRule + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationRule + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, id: str, evaluation_rule: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.EvaluationRule: + """Create or update an evaluation rule. + + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :param evaluation_rule: Evaluation rule resource. Required. + :type evaluation_rule: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationRule + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, id: str, evaluation_rule: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.EvaluationRule: + """Create or update an evaluation rule. + + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :param evaluation_rule: Evaluation rule resource. Required. + :type evaluation_rule: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationRule + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace - def list(self, **kwargs: Any) -> ItemPaged["_models.Index"]: - """List the latest version of each Index. + def create_or_update( + self, id: str, evaluation_rule: Union[_models.EvaluationRule, JSON, IO[bytes]], **kwargs: Any + ) -> _models.EvaluationRule: + """Create or update an evaluation rule. - :return: An iterator like instance of Index - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.Index] + :param id: Unique identifier for the evaluation rule. Required. + :type id: str + :param evaluation_rule: Evaluation rule resource. Is one of the following types: + EvaluationRule, JSON, IO[bytes] Required. + :type evaluation_rule: ~azure.ai.projects.models.EvaluationRule or JSON or IO[bytes] + :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluationRule + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EvaluationRule] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(evaluation_rule, (IOBase, bytes)): + _content = evaluation_rule + else: + _content = json.dumps(evaluation_rule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_evaluation_rules_create_or_update_request( + id=id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.EvaluationRule, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, + *, + action_type: Optional[Union[str, _models.EvaluationRuleActionType]] = None, + agent_name: Optional[str] = None, + enabled: Optional[bool] = None, + **kwargs: Any + ) -> ItemPaged["_models.EvaluationRule"]: + """List all evaluation rules. + + :keyword action_type: Filter by the type of evaluation rule. Known values are: + "continuousEvaluation" and "humanEvaluation". Default value is None. + :paramtype action_type: str or ~azure.ai.projects.models.EvaluationRuleActionType + :keyword agent_name: Filter by the agent name. Default value is None. + :paramtype agent_name: str + :keyword enabled: Filter by the enabled status. Default value is None. + :paramtype enabled: bool + :return: An iterator like instance of EvaluationRule + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.EvaluationRule] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.EvaluationRule]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -5564,7 +5598,10 @@ def list(self, **kwargs: Any) -> ItemPaged["_models.Index"]: def prepare_request(next_link=None): if not next_link: - _request = build_indexes_list_request( + _request = build_evaluation_rules_list_request( + action_type=action_type, + agent_name=agent_name, + enabled=enabled, api_version=self._config.api_version, headers=_headers, params=_params, @@ -5600,7 +5637,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Index], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.EvaluationRule], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -5622,35 +5659,272 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - @distributed_trace - def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: - """Get the specific version of the Index. The service returns 404 Not Found error if the Index - does not exist. - - :param name: The name of the resource. Required. - :type name: str - :param version: The specific version id of the Index to retrieve. Required. - :type version: str - :return: Index. The Index is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Index - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} +class EvaluatorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`evaluators` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_versions( + self, + name: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, + limit: Optional[int] = None, + **kwargs: Any + ) -> ItemPaged["_models.EvaluatorVersion"]: + """List all versions of the given evaluator. + + :param name: The name of the resource. Required. + :type name: str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword type: Filter evaluators by type. Possible values: 'all', 'custom', 'builtin'. Is one + of the following types: Literal["builtin"], Literal["custom"], Literal["all"], str Default + value is None. + :paramtype type: str or str or str or str + :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. Default value is None. + :paramtype limit: int + :return: An iterator like instance of EvaluatorVersion + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.EvaluatorVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.Index] = kwargs.pop("cls", None) + cls: ClsType[List[_models.EvaluatorVersion]] = kwargs.pop("cls", None) - _request = build_indexes_get_request( + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_evaluators_list_versions_request( + name=name, + foundry_features=foundry_features, + type=type, + limit=limit, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.EvaluatorVersion], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_latest_versions( + self, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, + limit: Optional[int] = None, + **kwargs: Any + ) -> ItemPaged["_models.EvaluatorVersion"]: + """List the latest version of each evaluator. + + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword type: Filter evaluators by type. Possible values: 'all', 'custom', 'builtin'. Is one + of the following types: Literal["builtin"], Literal["custom"], Literal["all"], str Default + value is None. + :paramtype type: str or str or str or str + :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. Default value is None. + :paramtype limit: int + :return: An iterator like instance of EvaluatorVersion + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.EvaluatorVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.EvaluatorVersion]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_evaluators_list_latest_versions_request( + foundry_features=foundry_features, + type=type, + limit=limit, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.EvaluatorVersion], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_version( + self, + name: str, + version: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Get the specific version of the EvaluatorVersion. The service returns 404 Not Found error if + the EvaluatorVersion does not exist. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the EvaluatorVersion to retrieve. Required. + :type version: str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) + + _request = build_evaluators_get_version_request( name=name, version=version, + foundry_features=foundry_features, api_version=self._config.api_version, headers=_headers, params=_params, @@ -5679,7 +5953,7 @@ def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Index, response.json()) + deserialized = _deserialize(_models.EvaluatorVersion, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -5687,14 +5961,24 @@ def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: return deserialized # type: ignore @distributed_trace - def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements - """Delete the specific version of the Index. The service returns 204 No Content if the Index was - deleted successfully or if the Index does not exist. + def delete_version( # pylint: disable=inconsistent-return-statements + self, + name: str, + version: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any + ) -> None: + """Delete the specific version of the EvaluatorVersion. The service returns 204 No Content if the + EvaluatorVersion was deleted successfully or if the EvaluatorVersion does not exist. :param name: The name of the resource. Required. :type name: str - :param version: The version of the Index to delete. Required. + :param version: The version of the EvaluatorVersion to delete. Required. :type version: str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -5712,9 +5996,10 @@ def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: dis cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_indexes_delete_request( + _request = build_evaluators_delete_version_request( name=name, version=version, + foundry_features=foundry_features, api_version=self._config.api_version, headers=_headers, params=_params, @@ -5739,92 +6024,284 @@ def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: dis return cls(pipeline_response, None, {}) # type: ignore @overload - def create_or_update( + def create_version( + self, + name: str, + evaluator_version: _models.EvaluatorVersion, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Create a new EvaluatorVersion with auto incremented version id. + + :param name: The name of the resource. Required. + :type name: str + :param evaluator_version: Required. + :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_version( + self, + name: str, + evaluator_version: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Create a new EvaluatorVersion with auto incremented version id. + + :param name: The name of the resource. Required. + :type name: str + :param evaluator_version: Required. + :type evaluator_version: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_version( + self, + name: str, + evaluator_version: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Create a new EvaluatorVersion with auto incremented version id. + + :param name: The name of the resource. Required. + :type name: str + :param evaluator_version: Required. + :type evaluator_version: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_version( + self, + name: str, + evaluator_version: Union[_models.EvaluatorVersion, JSON, IO[bytes]], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Create a new EvaluatorVersion with auto incremented version id. + + :param name: The name of the resource. Required. + :type name: str + :param evaluator_version: Is one of the following types: EvaluatorVersion, JSON, IO[bytes] + Required. + :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(evaluator_version, (IOBase, bytes)): + _content = evaluator_version + else: + _content = json.dumps(evaluator_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_evaluators_create_version_request( + name=name, + foundry_features=foundry_features, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.EvaluatorVersion, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def update_version( self, name: str, version: str, - index: _models.Index, + evaluator_version: _models.EvaluatorVersion, *, - content_type: str = "application/merge-patch+json", + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", **kwargs: Any - ) -> _models.Index: - """Create a new or update an existing Index with the given version id. + ) -> _models.EvaluatorVersion: + """Update an existing EvaluatorVersion with the given version id. :param name: The name of the resource. Required. :type name: str - :param version: The specific version id of the Index to create or update. Required. + :param version: The version of the EvaluatorVersion to update. Required. :type version: str - :param index: The Index to create or update. Required. - :type index: ~azure.ai.projects.models.Index + :param evaluator_version: Evaluator resource. Required. + :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/merge-patch+json". + Default value is "application/json". :paramtype content_type: str - :return: Index. The Index is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Index + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def create_or_update( - self, name: str, version: str, index: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any - ) -> _models.Index: - """Create a new or update an existing Index with the given version id. + def update_version( + self, + name: str, + version: str, + evaluator_version: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Update an existing EvaluatorVersion with the given version id. :param name: The name of the resource. Required. :type name: str - :param version: The specific version id of the Index to create or update. Required. + :param version: The version of the EvaluatorVersion to update. Required. :type version: str - :param index: The Index to create or update. Required. - :type index: JSON + :param evaluator_version: Evaluator resource. Required. + :type evaluator_version: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/merge-patch+json". + Default value is "application/json". :paramtype content_type: str - :return: Index. The Index is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Index + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def create_or_update( + def update_version( self, name: str, version: str, - index: IO[bytes], + evaluator_version: IO[bytes], *, - content_type: str = "application/merge-patch+json", + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + content_type: str = "application/json", **kwargs: Any - ) -> _models.Index: - """Create a new or update an existing Index with the given version id. + ) -> _models.EvaluatorVersion: + """Update an existing EvaluatorVersion with the given version id. :param name: The name of the resource. Required. :type name: str - :param version: The specific version id of the Index to create or update. Required. + :param version: The version of the EvaluatorVersion to update. Required. :type version: str - :param index: The Index to create or update. Required. - :type index: IO[bytes] + :param evaluator_version: Evaluator resource. Required. + :type evaluator_version: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/merge-patch+json". + Default value is "application/json". :paramtype content_type: str - :return: Index. The Index is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Index + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace - def create_or_update( - self, name: str, version: str, index: Union[_models.Index, JSON, IO[bytes]], **kwargs: Any - ) -> _models.Index: - """Create a new or update an existing Index with the given version id. + def update_version( + self, + name: str, + version: str, + evaluator_version: Union[_models.EvaluatorVersion, JSON, IO[bytes]], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW], + **kwargs: Any + ) -> _models.EvaluatorVersion: + """Update an existing EvaluatorVersion with the given version id. :param name: The name of the resource. Required. :type name: str - :param version: The specific version id of the Index to create or update. Required. + :param version: The version of the EvaluatorVersion to update. Required. :type version: str - :param index: The Index to create or update. Is one of the following types: Index, JSON, - IO[bytes] Required. - :type index: ~azure.ai.projects.models.Index or JSON or IO[bytes] - :return: Index. The Index is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Index + :param evaluator_version: Evaluator resource. Is one of the following types: EvaluatorVersion, + JSON, IO[bytes] Required. + :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. EVALUATIONS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.EVALUATIONS_V1_PREVIEW + :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.EvaluatorVersion :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5839,18 +6316,19 @@ def create_or_update( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.Index] = kwargs.pop("cls", None) + cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) - content_type = content_type or "application/merge-patch+json" + content_type = content_type or "application/json" _content = None - if isinstance(index, (IOBase, bytes)): - _content = index + if isinstance(evaluator_version, (IOBase, bytes)): + _content = evaluator_version else: - _content = json.dumps(index, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(evaluator_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_indexes_create_or_update_request( + _request = build_evaluators_update_version_request( name=name, version=version, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -5869,7 +6347,7 @@ def create_or_update( response = pipeline_response.http_response - if response.status_code not in [200, 201]: + if response.status_code not in [200]: if _stream: try: response.read() # Load the body in memory and close the socket @@ -5881,7 +6359,7 @@ def create_or_update( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Index, response.json()) + deserialized = _deserialize(_models.EvaluatorVersion, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -5889,14 +6367,14 @@ def create_or_update( return deserialized # type: ignore -class DeploymentsOperations: +class IndexesOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.ai.projects.AIProjectClient`'s - :attr:`deployments` attribute. + :attr:`indexes` attribute. """ def __init__(self, *args, **kwargs) -> None: @@ -5907,97 +6385,19 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def get(self, name: str, **kwargs: Any) -> _models.Deployment: - """Get a deployed model. + def list_versions(self, name: str, **kwargs: Any) -> ItemPaged["_models.Index"]: + """List all versions of the given Index. - :param name: Name of the deployment. Required. + :param name: The name of the resource. Required. :type name: str - :return: Deployment. The Deployment is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Deployment - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[_models.Deployment] = kwargs.pop("cls", None) - - _request = build_deployments_get_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - if _stream: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.Deployment, response.json()) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def list( - self, - *, - model_publisher: Optional[str] = None, - model_name: Optional[str] = None, - deployment_type: Optional[Union[str, _models.DeploymentType]] = None, - **kwargs: Any - ) -> ItemPaged["_models.Deployment"]: - """List all deployed models in the project. - - :keyword model_publisher: Model publisher to filter models by. Default value is None. - :paramtype model_publisher: str - :keyword model_name: Model name (the publisher specific name) to filter models by. Default - value is None. - :paramtype model_name: str - :keyword deployment_type: Type of deployment to filter list by. "ModelDeployment" Default value - is None. - :paramtype deployment_type: str or ~azure.ai.projects.models.DeploymentType - :return: An iterator like instance of Deployment - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.Deployment] + :return: An iterator like instance of Index + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.Index] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.Deployment]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -6010,10 +6410,8 @@ def list( def prepare_request(next_link=None): if not next_link: - _request = build_deployments_list_request( - model_publisher=model_publisher, - model_name=model_name, - deployment_type=deployment_type, + _request = build_indexes_list_versions_request( + name=name, api_version=self._config.api_version, headers=_headers, params=_params, @@ -6049,7 +6447,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Deployment], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.Index], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -6060,112 +6458,29 @@ def get_next(next_link=None): _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - -class RedTeamsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.AIProjectClient`'s - :attr:`red_teams` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def get(self, name: str, **kwargs: Any) -> _models.RedTeam: - """Get a redteam by name. - - :param name: Identifier of the red team run. Required. - :type name: str - :return: RedTeam. The RedTeam is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.RedTeam - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) - - _request = build_red_teams_get_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - if _stream: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) + ) + response = pipeline_response.http_response - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.RedTeam, response.json()) + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore + return pipeline_response - return deserialized # type: ignore + return ItemPaged(get_next, extract_data) @distributed_trace - def list(self, **kwargs: Any) -> ItemPaged["_models.RedTeam"]: - """List a redteam by name. + def list(self, **kwargs: Any) -> ItemPaged["_models.Index"]: + """List the latest version of each Index. - :return: An iterator like instance of RedTeam - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.RedTeam] + :return: An iterator like instance of Index + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.Index] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.RedTeam]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -6178,7 +6493,7 @@ def list(self, **kwargs: Any) -> ItemPaged["_models.RedTeam"]: def prepare_request(next_link=None): if not next_link: - _request = build_red_teams_list_request( + _request = build_indexes_list_request( api_version=self._config.api_version, headers=_headers, params=_params, @@ -6214,7 +6529,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.RedTeam], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.Index], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -6236,146 +6551,17 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - @overload - def create( - self, red_team: _models.RedTeam, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.RedTeam: - """Creates a redteam run. - - :param red_team: Redteam to be run. Required. - :type red_team: ~azure.ai.projects.models.RedTeam - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: RedTeam. The RedTeam is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.RedTeam - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create(self, red_team: JSON, *, content_type: str = "application/json", **kwargs: Any) -> _models.RedTeam: - """Creates a redteam run. - - :param red_team: Redteam to be run. Required. - :type red_team: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: RedTeam. The RedTeam is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.RedTeam - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def create(self, red_team: IO[bytes], *, content_type: str = "application/json", **kwargs: Any) -> _models.RedTeam: - """Creates a redteam run. - - :param red_team: Redteam to be run. Required. - :type red_team: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: RedTeam. The RedTeam is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.RedTeam - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def create(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwargs: Any) -> _models.RedTeam: - """Creates a redteam run. - - :param red_team: Redteam to be run. Is one of the following types: RedTeam, JSON, IO[bytes] - Required. - :type red_team: ~azure.ai.projects.models.RedTeam or JSON or IO[bytes] - :return: RedTeam. The RedTeam is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.RedTeam - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _content = None - if isinstance(red_team, (IOBase, bytes)): - _content = red_team - else: - _content = json.dumps(red_team, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - - _request = build_red_teams_create_request( - content_type=content_type, - api_version=self._config.api_version, - content=_content, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - if _stream: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.RedTeam, response.json()) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - -class EvaluationRulesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.AIProjectClient`'s - :attr:`evaluation_rules` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: - """Get an evaluation rule. + def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: + """Get the specific version of the Index. The service returns 404 Not Found error if the Index + does not exist. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationRule + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to retrieve. Required. + :type version: str + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Index :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -6389,10 +6575,11 @@ def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.EvaluationRule] = kwargs.pop("cls", None) + cls: ClsType[_models.Index] = kwargs.pop("cls", None) - _request = build_evaluation_rules_get_request( - id=id, + _request = build_indexes_get_request( + name=name, + version=version, api_version=self._config.api_version, headers=_headers, params=_params, @@ -6418,27 +6605,25 @@ def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluationRule, response.json()) + deserialized = _deserialize(_models.Index, response.json()) if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore @distributed_trace - def delete(self, id: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements - """Delete an evaluation rule. + def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements + """Delete the specific version of the Index. The service returns 204 No Content if the Index was + deleted successfully or if the Index does not exist. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str + :param name: The name of the resource. Required. + :type name: str + :param version: The version of the Index to delete. Required. + :type version: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -6456,8 +6641,9 @@ def delete(self, id: str, **kwargs: Any) -> None: # pylint: disable=inconsisten cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_evaluation_rules_delete_request( - id=id, + _request = build_indexes_delete_request( + name=name, + version=version, api_version=self._config.api_version, headers=_headers, params=_params, @@ -6478,81 +6664,96 @@ def delete(self, id: str, **kwargs: Any) -> None: # pylint: disable=inconsisten map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, None, {}) # type: ignore @overload def create_or_update( - self, id: str, evaluation_rule: _models.EvaluationRule, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationRule: - """Create or update an evaluation rule. + self, + name: str, + version: str, + index: _models.Index, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.Index: + """Create a new or update an existing Index with the given version id. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :param evaluation_rule: Evaluation rule resource. Required. - :type evaluation_rule: ~azure.ai.projects.models.EvaluationRule + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or update. Required. + :type version: str + :param index: The Index to create or update. Required. + :type index: ~azure.ai.projects.models.Index :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str - :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationRule + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Index :raises ~azure.core.exceptions.HttpResponseError: """ @overload def create_or_update( - self, id: str, evaluation_rule: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationRule: - """Create or update an evaluation rule. + self, name: str, version: str, index: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _models.Index: + """Create a new or update an existing Index with the given version id. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :param evaluation_rule: Evaluation rule resource. Required. - :type evaluation_rule: JSON + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or update. Required. + :type version: str + :param index: The Index to create or update. Required. + :type index: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str - :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationRule + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Index :raises ~azure.core.exceptions.HttpResponseError: """ @overload def create_or_update( - self, id: str, evaluation_rule: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationRule: - """Create or update an evaluation rule. + self, + name: str, + version: str, + index: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.Index: + """Create a new or update an existing Index with the given version id. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :param evaluation_rule: Evaluation rule resource. Required. - :type evaluation_rule: IO[bytes] + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or update. Required. + :type version: str + :param index: The Index to create or update. Required. + :type index: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str - :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationRule + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Index :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace def create_or_update( - self, id: str, evaluation_rule: Union[_models.EvaluationRule, JSON, IO[bytes]], **kwargs: Any - ) -> _models.EvaluationRule: - """Create or update an evaluation rule. + self, name: str, version: str, index: Union[_models.Index, JSON, IO[bytes]], **kwargs: Any + ) -> _models.Index: + """Create a new or update an existing Index with the given version id. - :param id: Unique identifier for the evaluation rule. Required. - :type id: str - :param evaluation_rule: Evaluation rule resource. Is one of the following types: - EvaluationRule, JSON, IO[bytes] Required. - :type evaluation_rule: ~azure.ai.projects.models.EvaluationRule or JSON or IO[bytes] - :return: EvaluationRule. The EvaluationRule is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationRule + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or update. Required. + :type version: str + :param index: The Index to create or update. Is one of the following types: Index, JSON, + IO[bytes] Required. + :type index: ~azure.ai.projects.models.Index or JSON or IO[bytes] + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Index :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -6567,17 +6768,18 @@ def create_or_update( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EvaluationRule] = kwargs.pop("cls", None) + cls: ClsType[_models.Index] = kwargs.pop("cls", None) - content_type = content_type or "application/json" + content_type = content_type or "application/merge-patch+json" _content = None - if isinstance(evaluation_rule, (IOBase, bytes)): - _content = evaluation_rule + if isinstance(index, (IOBase, bytes)): + _content = index else: - _content = json.dumps(evaluation_rule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(index, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_evaluation_rules_create_or_update_request( - id=id, + _request = build_indexes_create_or_update_request( + name=name, + version=version, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -6608,40 +6810,126 @@ def create_or_update( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluationRule, response.json()) + deserialized = _deserialize(_models.Index, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - @distributed_trace - def list( + +class InsightsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`insights` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @overload + def generate( self, + insight: _models.Insight, *, - action_type: Optional[Union[str, _models.EvaluationRuleActionType]] = None, - agent_name: Optional[str] = None, - enabled: Optional[bool] = None, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + content_type: str = "application/json", **kwargs: Any - ) -> ItemPaged["_models.EvaluationRule"]: - """List all evaluation rules. + ) -> _models.Insight: + """Generate Insights. - :keyword action_type: Filter by the type of evaluation rule. Known values are: - "continuousEvaluation" and "humanEvaluation". Default value is None. - :paramtype action_type: str or ~azure.ai.projects.models.EvaluationRuleActionType - :keyword agent_name: Filter by the agent name. Default value is None. - :paramtype agent_name: str - :keyword enabled: Filter by the enabled status. Default value is None. - :paramtype enabled: bool - :return: An iterator like instance of EvaluationRule - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.EvaluationRule] + :param insight: Complete evaluation configuration including data source, evaluators, and result + settings. Required. + :type insight: ~azure.ai.projects.models.Insight + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. INSIGHTS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.INSIGHTS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Insight. The Insight is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Insight :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.EvaluationRule]] = kwargs.pop("cls", None) + @overload + def generate( + self, + insight: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Insight: + """Generate Insights. + + :param insight: Complete evaluation configuration including data source, evaluators, and result + settings. Required. + :type insight: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. INSIGHTS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.INSIGHTS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Insight. The Insight is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Insight + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def generate( + self, + insight: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Insight: + """Generate Insights. + + :param insight: Complete evaluation configuration including data source, evaluators, and result + settings. Required. + :type insight: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. INSIGHTS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.INSIGHTS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Insight. The Insight is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Insight + :raises ~azure.core.exceptions.HttpResponseError: + """ + @distributed_trace + def generate( + self, + insight: Union[_models.Insight, JSON, IO[bytes]], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + **kwargs: Any + ) -> _models.Insight: + """Generate Insights. + + :param insight: Complete evaluation configuration including data source, evaluators, and result + settings. Is one of the following types: Insight, JSON, IO[bytes] Required. + :type insight: ~azure.ai.projects.models.Insight or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. INSIGHTS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.INSIGHTS_V1_PREVIEW + :return: Insight. The Insight is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Insight + :raises ~azure.core.exceptions.HttpResponseError: + """ error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -6650,96 +6938,69 @@ def list( } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: - - _request = build_evaluation_rules_list_request( - action_type=action_type, - agent_name=agent_name, - enabled=enabled, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - return _request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluationRule], deserialized.get("value", [])) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} - def get_next(next_link=None): - _request = prepare_request(next_link) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Insight] = kwargs.pop("cls", None) - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response + content_type = content_type or "application/json" + _content = None + if isinstance(insight, (IOBase, bytes)): + _content = insight + else: + _content = json.dumps(insight, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + _request = build_insights_generate_request( + foundry_features=foundry_features, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - return pipeline_response + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - return ItemPaged(get_next, extract_data) + response = pipeline_response.http_response + if response.status_code not in [201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) -class EvaluationTaxonomiesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Insight, response.json()) - Instead, you should access the following operations through - :class:`~azure.ai.projects.AIProjectClient`'s - :attr:`evaluation_taxonomies` attribute. - """ + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + return deserialized # type: ignore @distributed_trace - def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: - """Get an evaluation run by name. + def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kwargs: Any) -> _models.Insight: + """Get a specific insight by Id. - :param name: The name of the resource. Required. - :type name: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :param id: The unique identifier for the insights report. Required. + :type id: str + :keyword include_coordinates: Whether to include coordinates for visualization in the response. + Defaults to false. Default value is None. + :paramtype include_coordinates: bool + :return: Insight. The Insight is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Insight :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -6753,10 +7014,11 @@ def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) + cls: ClsType[_models.Insight] = kwargs.pop("cls", None) - _request = build_evaluation_taxonomies_get_request( - name=name, + _request = build_insights_get_request( + id=id, + include_coordinates=include_coordinates, api_version=self._config.api_version, headers=_headers, params=_params, @@ -6790,7 +7052,7 @@ def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) + deserialized = _deserialize(_models.Insight, response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -6799,22 +7061,37 @@ def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: @distributed_trace def list( - self, *, input_name: Optional[str] = None, input_type: Optional[str] = None, **kwargs: Any - ) -> ItemPaged["_models.EvaluationTaxonomy"]: - """List evaluation taxonomies. + self, + *, + type: Optional[Union[str, _models.InsightType]] = None, + eval_id: Optional[str] = None, + run_id: Optional[str] = None, + agent_name: Optional[str] = None, + include_coordinates: Optional[bool] = None, + **kwargs: Any + ) -> ItemPaged["_models.Insight"]: + """List all insights in reverse chronological order (newest first). - :keyword input_name: Filter by the evaluation input name. Default value is None. - :paramtype input_name: str - :keyword input_type: Filter by taxonomy input type. Default value is None. - :paramtype input_type: str - :return: An iterator like instance of EvaluationTaxonomy - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.EvaluationTaxonomy] + :keyword type: Filter by the type of analysis. Known values are: "EvaluationRunClusterInsight", + "AgentClusterInsight", and "EvaluationComparison". Default value is None. + :paramtype type: str or ~azure.ai.projects.models.InsightType + :keyword eval_id: Filter by the evaluation ID. Default value is None. + :paramtype eval_id: str + :keyword run_id: Filter by the evaluation run ID. Default value is None. + :paramtype run_id: str + :keyword agent_name: Filter by the agent name. Default value is None. + :paramtype agent_name: str + :keyword include_coordinates: Whether to include coordinates for visualization in the response. + Defaults to false. Default value is None. + :paramtype include_coordinates: bool + :return: An iterator like instance of Insight + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.Insight] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.EvaluationTaxonomy]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.Insight]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -6827,9 +7104,12 @@ def list( def prepare_request(next_link=None): if not next_link: - _request = build_evaluation_taxonomies_list_request( - input_name=input_name, - input_type=input_type, + _request = build_insights_list_request( + type=type, + eval_id=eval_id, + run_id=run_id, + agent_name=agent_name, + include_coordinates=include_coordinates, api_version=self._config.api_version, headers=_headers, params=_params, @@ -6865,7 +7145,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluationTaxonomy], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.Insight], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -6887,126 +7167,136 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - @distributed_trace - def delete(self, name: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements - """Delete an evaluation taxonomy by name. - - :param name: The name of the resource. Required. - :type name: str - :return: None - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_evaluation_taxonomies_delete_request( - name=name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) +class MemoryStoresOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`memory_stores` attribute. + """ - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload def create( - self, name: str, body: _models.EvaluationTaxonomy, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Create an evaluation taxonomy. + self, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + name: str, + definition: _models.MemoryStoreDefinition, + content_type: str = "application/json", + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Create a memory store. - :param name: The name of the evaluation taxonomy. Required. - :type name: str - :param body: The evaluation taxonomy. Required. - :type body: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword name: The name of the memory store. Required. + :paramtype name: str + :keyword definition: The memory store definition. Required. + :paramtype definition: ~azure.ai.projects.models.MemoryStoreDefinition :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword description: A human-readable description of the memory store. Default value is None. + :paramtype description: str + :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default + value is None. + :paramtype metadata: dict[str, str] + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @overload def create( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Create an evaluation taxonomy. + self, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Create a memory store. - :param name: The name of the evaluation taxonomy. Required. - :type name: str - :param body: The evaluation taxonomy. Required. + :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @overload def create( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Create an evaluation taxonomy. + self, + body: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Create a memory store. - :param name: The name of the evaluation taxonomy. Required. - :type name: str - :param body: The evaluation taxonomy. Required. + :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace def create( - self, name: str, body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Create an evaluation taxonomy. + self, + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + name: str = _Unset, + definition: _models.MemoryStoreDefinition = _Unset, + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Create a memory store. - :param name: The name of the evaluation taxonomy. Required. - :type name: str - :param body: The evaluation taxonomy. Is one of the following types: EvaluationTaxonomy, JSON, - IO[bytes] Required. - :type body: ~azure.ai.projects.models.EvaluationTaxonomy or JSON or IO[bytes] - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword name: The name of the memory store. Required. + :paramtype name: str + :keyword definition: The memory store definition. Required. + :paramtype definition: ~azure.ai.projects.models.MemoryStoreDefinition + :keyword description: A human-readable description of the memory store. Default value is None. + :paramtype description: str + :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default + value is None. + :paramtype metadata: dict[str, str] + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -7021,8 +7311,15 @@ def create( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) + cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) + if body is _Unset: + if name is _Unset: + raise TypeError("missing required argument: name") + if definition is _Unset: + raise TypeError("missing required argument: definition") + body = {"definition": definition, "description": description, "metadata": metadata, "name": name} + body = {k: v for k, v in body.items() if v is not None} content_type = content_type or "application/json" _content = None if isinstance(body, (IOBase, bytes)): @@ -7030,8 +7327,8 @@ def create( else: _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_evaluation_taxonomies_create_request( - name=name, + _request = build_memory_stores_create_request( + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -7050,19 +7347,23 @@ def create( response = pipeline_response.http_response - if response.status_code not in [200, 201]: + if response.status_code not in [200]: if _stream: try: response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) + deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -7071,71 +7372,116 @@ def create( @overload def update( - self, name: str, body: _models.EvaluationTaxonomy, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Update an evaluation taxonomy. + self, + name: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Update a memory store. - :param name: The name of the evaluation taxonomy. Required. + :param name: The name of the memory store to update. Required. :type name: str - :param body: The evaluation taxonomy. Required. - :type body: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :keyword description: A human-readable description of the memory store. Default value is None. + :paramtype description: str + :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default + value is None. + :paramtype metadata: dict[str, str] + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @overload def update( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Update an evaluation taxonomy. + self, + name: str, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Update a memory store. - :param name: The name of the evaluation taxonomy. Required. + :param name: The name of the memory store to update. Required. :type name: str - :param body: The evaluation taxonomy. Required. + :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @overload def update( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Update an evaluation taxonomy. + self, + name: str, + body: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Update a memory store. - :param name: The name of the evaluation taxonomy. Required. + :param name: The name of the memory store to update. Required. :type name: str - :param body: The evaluation taxonomy. Required. + :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace def update( - self, name: str, body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], **kwargs: Any - ) -> _models.EvaluationTaxonomy: - """Update an evaluation taxonomy. + self, + name: str, + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Update a memory store. - :param name: The name of the evaluation taxonomy. Required. + :param name: The name of the memory store to update. Required. :type name: str - :param body: The evaluation taxonomy. Is one of the following types: EvaluationTaxonomy, JSON, - IO[bytes] Required. - :type body: ~azure.ai.projects.models.EvaluationTaxonomy or JSON or IO[bytes] - :return: EvaluationTaxonomy. The EvaluationTaxonomy is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluationTaxonomy + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword description: A human-readable description of the memory store. Default value is None. + :paramtype description: str + :keyword metadata: Arbitrary key-value metadata to associate with the memory store. Default + value is None. + :paramtype metadata: dict[str, str] + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -7150,8 +7496,11 @@ def update( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EvaluationTaxonomy] = kwargs.pop("cls", None) + cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) + if body is _Unset: + body = {"description": description, "metadata": metadata} + body = {k: v for k, v in body.items() if v is not None} content_type = content_type or "application/json" _content = None if isinstance(body, (IOBase, bytes)): @@ -7159,8 +7508,9 @@ def update( else: _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_evaluation_taxonomies_update_request( + _request = build_memory_stores_update_request( name=name, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -7186,65 +7536,37 @@ def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluationTaxonomy, response.json()) + deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - -class EvaluatorsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.AIProjectClient`'s - :attr:`evaluators` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def list_versions( - self, - name: str, - *, - type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, - limit: Optional[int] = None, - **kwargs: Any - ) -> ItemPaged["_models.EvaluatorVersion"]: - """List all versions of the given evaluator. + def get( + self, name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any + ) -> _models.MemoryStoreDetails: + """Retrieve a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store to retrieve. Required. :type name: str - :keyword type: Filter evaluators by type. Possible values: 'all', 'custom', 'builtin'. Is one - of the following types: Literal["builtin"], Literal["custom"], Literal["all"], str Default - value is None. - :paramtype type: str or str or str or str - :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and - 100, and the default is 20. Default value is None. - :paramtype limit: int - :return: An iterator like instance of EvaluatorVersion - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.EvaluatorVersion] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :return: MemoryStoreDetails. The MemoryStoreDetails is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDetails :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.EvaluatorVersion]] = kwargs.pop("cls", None) - error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -7253,95 +7575,90 @@ def list_versions( } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: - - _request = build_evaluators_list_versions_request( - name=name, - type=type, - limit=limit, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + cls: ClsType[_models.MemoryStoreDetails] = kwargs.pop("cls", None) - return _request + _request = build_memory_stores_get_request( + name=name, + foundry_features=foundry_features, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluatorVersion], deserialized.get("value", [])) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - def get_next(next_link=None): - _request = prepare_request(next_link) + response = pipeline_response.http_response - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, ) - response = pipeline_response.http_response + raise HttpResponseError(response=response, model=error) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.MemoryStoreDetails, response.json()) - return pipeline_response + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore - return ItemPaged(get_next, extract_data) + return deserialized # type: ignore @distributed_trace - def list_latest_versions( + def list( self, *, - type: Optional[Union[Literal["builtin"], Literal["custom"], Literal["all"], str]] = None, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], limit: Optional[int] = None, + order: Optional[Union[str, _models.PageOrder]] = None, + before: Optional[str] = None, **kwargs: Any - ) -> ItemPaged["_models.EvaluatorVersion"]: - """List the latest version of each evaluator. + ) -> ItemPaged["_models.MemoryStoreDetails"]: + """List all memory stores. - :keyword type: Filter evaluators by type. Possible values: 'all', 'custom', 'builtin'. Is one - of the following types: Literal["builtin"], Literal["custom"], Literal["all"], str Default - value is None. - :paramtype type: str or str or str or str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and - 100, and the default is 20. Default value is None. + 100, and the + default is 20. Default value is None. :paramtype limit: int - :return: An iterator like instance of EvaluatorVersion - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.EvaluatorVersion] + :keyword order: Sort order by the ``created_at`` timestamp of the objects. ``asc`` for + ascending order and``desc`` + for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.PageOrder + :keyword before: A cursor for use in pagination. ``before`` is an object ID that defines your + place in the list. + For instance, if you make a list request and receive 100 objects, ending with obj_foo, your + subsequent call can include before=obj_foo in order to fetch the previous page of the list. + Default value is None. + :paramtype before: str + :return: An iterator like instance of MemoryStoreDetails + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.MemoryStoreDetails] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.EvaluatorVersion]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.MemoryStoreDetails]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -7351,54 +7668,33 @@ def list_latest_versions( } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: - - _request = build_evaluators_list_latest_versions_request( - type=type, - limit=limit, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) + def prepare_request(_continuation_token=None): + _request = build_memory_stores_list_request( + foundry_features=foundry_features, + limit=limit, + order=order, + after=_continuation_token, + before=before, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) return _request def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluatorVersion], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.MemoryStoreDetails], deserialized.get("data", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) + return deserialized.get("last_id") or None, iter(list_of_elem) - def get_next(next_link=None): - _request = prepare_request(next_link) + def get_next(_continuation_token=None): + _request = prepare_request(_continuation_token) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access @@ -7408,23 +7704,29 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) return pipeline_response return ItemPaged(get_next, extract_data) @distributed_trace - def get_version(self, name: str, version: str, **kwargs: Any) -> _models.EvaluatorVersion: - """Get the specific version of the EvaluatorVersion. The service returns 404 Not Found error if - the EvaluatorVersion does not exist. + def delete( + self, name: str, *, foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], **kwargs: Any + ) -> _models.DeleteMemoryStoreResult: + """Delete a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store to delete. Required. :type name: str - :param version: The specific version id of the EvaluatorVersion to retrieve. Required. - :type version: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :return: DeleteMemoryStoreResult. The DeleteMemoryStoreResult is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.DeleteMemoryStoreResult :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -7438,11 +7740,11 @@ def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Evaluat _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) + cls: ClsType[_models.DeleteMemoryStoreResult] = kwargs.pop("cls", None) - _request = build_evaluators_get_version_request( + _request = build_memory_stores_delete_request( name=name, - version=version, + foundry_features=foundry_features, api_version=self._config.api_version, headers=_headers, params=_params, @@ -7466,144 +7768,148 @@ def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Evaluat except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluatorVersion, response.json()) + deserialized = _deserialize(_models.DeleteMemoryStoreResult, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - @distributed_trace - def delete_version( # pylint: disable=inconsistent-return-statements - self, name: str, version: str, **kwargs: Any - ) -> None: - """Delete the specific version of the EvaluatorVersion. The service returns 204 No Content if the - EvaluatorVersion was deleted successfully or if the EvaluatorVersion does not exist. - - :param name: The name of the resource. Required. - :type name: str - :param version: The version of the EvaluatorVersion to delete. Required. - :type version: str - :return: None - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_evaluators_delete_version_request( - name=name, - version=version, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if cls: - return cls(pipeline_response, None, {}) # type: ignore - @overload - def create_version( + def search_memories( self, name: str, - evaluator_version: _models.EvaluatorVersion, *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str, content_type: str = "application/json", + items: Optional[List[_models.InputItem]] = None, + previous_search_id: Optional[str] = None, + options: Optional[_models.MemorySearchOptions] = None, **kwargs: Any - ) -> _models.EvaluatorVersion: - """Create a new EvaluatorVersion with auto incremented version id. + ) -> _models.MemoryStoreSearchResult: + """Search for relevant memories from a memory store based on conversation context. - :param name: The name of the resource. Required. + :param name: The name of the memory store to search. Required. :type name: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :paramtype scope: str :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :keyword items: Items for which to search for relevant memories. Default value is None. + :paramtype items: list[~azure.ai.projects.models.InputItem] + :keyword previous_search_id: The unique ID of the previous search request, enabling incremental + memory search from where the last operation left off. Default value is None. + :paramtype previous_search_id: str + :keyword options: Memory search options. Default value is None. + :paramtype options: ~azure.ai.projects.models.MemorySearchOptions + :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def create_version( - self, name: str, evaluator_version: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluatorVersion: - """Create a new EvaluatorVersion with auto incremented version id. + def search_memories( + self, + name: str, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreSearchResult: + """Search for relevant memories from a memory store based on conversation context. - :param name: The name of the resource. Required. + :param name: The name of the memory store to search. Required. :type name: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: JSON + :param body: Required. + :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def create_version( - self, name: str, evaluator_version: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluatorVersion: - """Create a new EvaluatorVersion with auto incremented version id. + def search_memories( + self, + name: str, + body: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreSearchResult: + """Search for relevant memories from a memory store based on conversation context. - :param name: The name of the resource. Required. + :param name: The name of the memory store to search. Required. :type name: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: IO[bytes] + :param body: Required. + :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace - def create_version( - self, name: str, evaluator_version: Union[_models.EvaluatorVersion, JSON, IO[bytes]], **kwargs: Any - ) -> _models.EvaluatorVersion: - """Create a new EvaluatorVersion with auto incremented version id. + def search_memories( + self, + name: str, + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str = _Unset, + items: Optional[List[_models.InputItem]] = None, + previous_search_id: Optional[str] = None, + options: Optional[_models.MemorySearchOptions] = None, + **kwargs: Any + ) -> _models.MemoryStoreSearchResult: + """Search for relevant memories from a memory store based on conversation context. - :param name: The name of the resource. Required. + :param name: The name of the memory store to search. Required. :type name: str - :param evaluator_version: Evaluator resource. Is one of the following types: EvaluatorVersion, - JSON, IO[bytes] Required. - :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion or JSON or IO[bytes] - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :paramtype scope: str + :keyword items: Items for which to search for relevant memories. Default value is None. + :paramtype items: list[~azure.ai.projects.models.InputItem] + :keyword previous_search_id: The unique ID of the previous search request, enabling incremental + memory search from where the last operation left off. Default value is None. + :paramtype previous_search_id: str + :keyword options: Memory search options. Default value is None. + :paramtype options: ~azure.ai.projects.models.MemorySearchOptions + :return: MemoryStoreSearchResult. The MemoryStoreSearchResult is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreSearchResult :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -7618,17 +7924,28 @@ def create_version( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) + cls: ClsType[_models.MemoryStoreSearchResult] = kwargs.pop("cls", None) + if body is _Unset: + if scope is _Unset: + raise TypeError("missing required argument: scope") + body = { + "items": items, + "options": options, + "previous_search_id": previous_search_id, + "scope": scope, + } + body = {k: v for k, v in body.items() if v is not None} content_type = content_type or "application/json" _content = None - if isinstance(evaluator_version, (IOBase, bytes)): - _content = evaluator_version + if isinstance(body, (IOBase, bytes)): + _content = body else: - _content = json.dumps(evaluator_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_evaluators_create_version_request( + _request = build_memory_stores_search_memories_request( name=name, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -7647,253 +7964,362 @@ def create_version( response = pipeline_response.http_response - if response.status_code not in [201]: + if response.status_code not in [200]: if _stream: try: response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.EvaluatorVersion, response.json()) + deserialized = _deserialize(_models.MemoryStoreSearchResult, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + def _update_memories_initial( + self, + name: str, + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str = _Unset, + items: Optional[List[_models.InputItem]] = None, + previous_update_id: Optional[str] = None, + update_delay: Optional[int] = None, + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + if body is _Unset: + if scope is _Unset: + raise TypeError("missing required argument: scope") + body = { + "items": items, + "previous_update_id": previous_update_id, + "scope": scope, + "update_delay": update_delay, + } + body = {k: v for k, v in body.items() if v is not None} + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_memory_stores_update_memories_request( + name=name, + foundry_features=foundry_features, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def _begin_update_memories( + self, + name: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str, + content_type: str = "application/json", + items: Optional[List[_models.InputItem]] = None, + previous_update_id: Optional[str] = None, + update_delay: Optional[int] = None, + **kwargs: Any + ) -> LROPoller[_models.MemoryStoreUpdateCompletedResult]: ... + @overload + def _begin_update_memories( + self, + name: str, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.MemoryStoreUpdateCompletedResult]: ... + @overload + def _begin_update_memories( + self, + name: str, + body: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.MemoryStoreUpdateCompletedResult]: ... + + @distributed_trace + def _begin_update_memories( + self, + name: str, + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str = _Unset, + items: Optional[List[_models.InputItem]] = None, + previous_update_id: Optional[str] = None, + update_delay: Optional[int] = None, + **kwargs: Any + ) -> LROPoller[_models.MemoryStoreUpdateCompletedResult]: + """Update memory store with conversation memories. + + :param name: The name of the memory store to update. Required. + :type name: str + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :paramtype scope: str + :keyword items: Conversation items from which to extract memories. Default value is None. + :paramtype items: list[~azure.ai.projects.models.InputItem] + :keyword previous_update_id: The unique ID of the previous update request, enabling incremental + memory updates from where the last operation left off. Default value is None. + :paramtype previous_update_id: str + :keyword update_delay: Timeout period before processing the memory update in seconds. + If a new update request is received during this period, it will cancel the current request and + reset the timeout. + Set to 0 to immediately trigger the update without delay. + Defaults to 300 (5 minutes). Default value is None. + :paramtype update_delay: int + :return: An instance of LROPoller that returns MemoryStoreUpdateCompletedResult. The + MemoryStoreUpdateCompletedResult is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.ai.projects.models.MemoryStoreUpdateCompletedResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.MemoryStoreUpdateCompletedResult] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_memories_initial( + name=name, + body=body, + foundry_features=foundry_features, + scope=scope, + items=items, + previous_update_id=previous_update_id, + update_delay=update_delay, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers["Operation-Location"] = self._deserialize( + "str", response.headers.get("Operation-Location") + ) + + deserialized = _deserialize(_models.MemoryStoreUpdateCompletedResult, response.json().get("result", {})) + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.MemoryStoreUpdateCompletedResult].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.MemoryStoreUpdateCompletedResult]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + @overload - def update_version( + def delete_scope( self, name: str, - version: str, - evaluator_version: _models.EvaluatorVersion, *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluatorVersion: - """Update an existing EvaluatorVersion with the given version id. + ) -> _models.MemoryStoreDeleteScopeResult: + """Delete all memories associated with a specific scope from a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store. Required. :type name: str - :param version: The version of the EvaluatorVersion to update. Required. - :type version: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword scope: The namespace that logically groups and isolates memories to delete, such as a + user ID. Required. + :paramtype scope: str :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with + MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def update_version( - self, name: str, version: str, evaluator_version: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluatorVersion: - """Update an existing EvaluatorVersion with the given version id. + def delete_scope( + self, + name: str, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any + ) -> _models.MemoryStoreDeleteScopeResult: + """Delete all memories associated with a specific scope from a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store. Required. :type name: str - :param version: The version of the EvaluatorVersion to update. Required. - :type version: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: JSON + :param body: Required. + :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with + MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def update_version( + def delete_scope( self, name: str, - version: str, - evaluator_version: IO[bytes], + body: IO[bytes], *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], content_type: str = "application/json", **kwargs: Any - ) -> _models.EvaluatorVersion: - """Update an existing EvaluatorVersion with the given version id. + ) -> _models.MemoryStoreDeleteScopeResult: + """Delete all memories associated with a specific scope from a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store. Required. :type name: str - :param version: The version of the EvaluatorVersion to update. Required. - :type version: str - :param evaluator_version: Evaluator resource. Required. - :type evaluator_version: IO[bytes] + :param body: Required. + :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion + :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with + MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace - def update_version( + def delete_scope( self, name: str, - version: str, - evaluator_version: Union[_models.EvaluatorVersion, JSON, IO[bytes]], + body: Union[JSON, IO[bytes]] = _Unset, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + scope: str = _Unset, **kwargs: Any - ) -> _models.EvaluatorVersion: - """Update an existing EvaluatorVersion with the given version id. + ) -> _models.MemoryStoreDeleteScopeResult: + """Delete all memories associated with a specific scope from a memory store. - :param name: The name of the resource. Required. + :param name: The name of the memory store. Required. :type name: str - :param version: The version of the EvaluatorVersion to update. Required. - :type version: str - :param evaluator_version: Evaluator resource. Is one of the following types: EvaluatorVersion, - JSON, IO[bytes] Required. - :type evaluator_version: ~azure.ai.projects.models.EvaluatorVersion or JSON or IO[bytes] - :return: EvaluatorVersion. The EvaluatorVersion is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.EvaluatorVersion - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.EvaluatorVersion] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _content = None - if isinstance(evaluator_version, (IOBase, bytes)): - _content = evaluator_version - else: - _content = json.dumps(evaluator_version, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - - _request = build_evaluators_update_version_request( - name=name, - version=version, - content_type=content_type, - api_version=self._config.api_version, - content=_content, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - if _stream: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.EvaluatorVersion, response.json()) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - -class InsightsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.ai.projects.AIProjectClient`'s - :attr:`insights` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - def generate( - self, insight: _models.Insight, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.Insight: - """Generate Insights. - - :param insight: Complete evaluation configuration including data source, evaluators, and result - settings. Required. - :type insight: ~azure.ai.projects.models.Insight - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: Insight. The Insight is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Insight - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def generate(self, insight: JSON, *, content_type: str = "application/json", **kwargs: Any) -> _models.Insight: - """Generate Insights. - - :param insight: Complete evaluation configuration including data source, evaluators, and result - settings. Required. - :type insight: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: Insight. The Insight is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Insight - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def generate(self, insight: IO[bytes], *, content_type: str = "application/json", **kwargs: Any) -> _models.Insight: - """Generate Insights. - - :param insight: Complete evaluation configuration including data source, evaluators, and result - settings. Required. - :type insight: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: Insight. The Insight is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Insight - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def generate(self, insight: Union[_models.Insight, JSON, IO[bytes]], **kwargs: Any) -> _models.Insight: - """Generate Insights. - - :param insight: Complete evaluation configuration including data source, evaluators, and result - settings. Is one of the following types: Insight, JSON, IO[bytes] Required. - :type insight: ~azure.ai.projects.models.Insight or JSON or IO[bytes] - :return: Insight. The Insight is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Insight + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW + :keyword scope: The namespace that logically groups and isolates memories to delete, such as a + user ID. Required. + :paramtype scope: str + :return: MemoryStoreDeleteScopeResult. The MemoryStoreDeleteScopeResult is compatible with + MutableMapping + :rtype: ~azure.ai.projects.models.MemoryStoreDeleteScopeResult :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -7908,16 +8334,23 @@ def generate(self, insight: Union[_models.Insight, JSON, IO[bytes]], **kwargs: A _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.Insight] = kwargs.pop("cls", None) + cls: ClsType[_models.MemoryStoreDeleteScopeResult] = kwargs.pop("cls", None) + if body is _Unset: + if scope is _Unset: + raise TypeError("missing required argument: scope") + body = {"scope": scope} + body = {k: v for k, v in body.items() if v is not None} content_type = content_type or "application/json" _content = None - if isinstance(insight, (IOBase, bytes)): - _content = insight + if isinstance(body, (IOBase, bytes)): + _content = body else: - _content = json.dumps(insight, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_insights_generate_request( + _request = build_memory_stores_delete_scope_request( + name=name, + foundry_features=foundry_features, content_type=content_type, api_version=self._config.api_version, content=_content, @@ -7936,36 +8369,55 @@ def generate(self, insight: Union[_models.Insight, JSON, IO[bytes]], **kwargs: A response = pipeline_response.http_response - if response.status_code not in [201]: + if response.status_code not in [200]: if _stream: try: response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Insight, response.json()) + deserialized = _deserialize(_models.MemoryStoreDeleteScopeResult, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + +class RedTeamsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`red_teams` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace - def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kwargs: Any) -> _models.Insight: - """Get a specific insight by Id. + def get(self, name: str, **kwargs: Any) -> _models.RedTeam: + """Get a redteam by name. - :param id: The unique identifier for the insights report. Required. - :type id: str - :keyword include_coordinates: Whether to include coordinates for visualization in the response. - Defaults to false. Default value is None. - :paramtype include_coordinates: bool - :return: Insight. The Insight is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.Insight + :param name: Identifier of the red team run. Required. + :type name: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.RedTeam :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -7979,11 +8431,10 @@ def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kwargs: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.Insight] = kwargs.pop("cls", None) + cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) - _request = build_insights_get_request( - id=id, - include_coordinates=include_coordinates, + _request = build_red_teams_get_request( + name=name, api_version=self._config.api_version, headers=_headers, params=_params, @@ -8017,7 +8468,7 @@ def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kwargs: if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.Insight, response.json()) + deserialized = _deserialize(_models.RedTeam, response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -8025,38 +8476,17 @@ def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kwargs: return deserialized # type: ignore @distributed_trace - def list( - self, - *, - type: Optional[Union[str, _models.InsightType]] = None, - eval_id: Optional[str] = None, - run_id: Optional[str] = None, - agent_name: Optional[str] = None, - include_coordinates: Optional[bool] = None, - **kwargs: Any - ) -> ItemPaged["_models.Insight"]: - """List all insights in reverse chronological order (newest first). + def list(self, **kwargs: Any) -> ItemPaged["_models.RedTeam"]: + """List a redteam by name. - :keyword type: Filter by the type of analysis. Known values are: "EvaluationRunClusterInsight", - "AgentClusterInsight", and "EvaluationComparison". Default value is None. - :paramtype type: str or ~azure.ai.projects.models.InsightType - :keyword eval_id: Filter by the evaluation ID. Default value is None. - :paramtype eval_id: str - :keyword run_id: Filter by the evaluation run ID. Default value is None. - :paramtype run_id: str - :keyword agent_name: Filter by the agent name. Default value is None. - :paramtype agent_name: str - :keyword include_coordinates: Whether to include coordinates for visualization in the response. - Defaults to false. Default value is None. - :paramtype include_coordinates: bool - :return: An iterator like instance of Insight - :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.Insight] + :return: An iterator like instance of RedTeam + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.models.RedTeam] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.Insight]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.RedTeam]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -8069,12 +8499,7 @@ def list( def prepare_request(next_link=None): if not next_link: - _request = build_insights_list_request( - type=type, - eval_id=eval_id, - run_id=run_id, - agent_name=agent_name, - include_coordinates=include_coordinates, + _request = build_red_teams_list_request( api_version=self._config.api_version, headers=_headers, params=_params, @@ -8110,7 +8535,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Insight], deserialized.get("value", [])) + list_of_elem = _deserialize(List[_models.RedTeam], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -8132,6 +8557,162 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) + @overload + def create( + self, + red_team: _models.RedTeam, + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.RED_TEAMS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: ~azure.ai.projects.models.RedTeam + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. RED_TEAMS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.RED_TEAMS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + red_team: JSON, + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.RED_TEAMS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. RED_TEAMS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.RED_TEAMS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + red_team: IO[bytes], + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.RED_TEAMS_V1_PREVIEW]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. RED_TEAMS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.RED_TEAMS_V1_PREVIEW + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create( + self, + red_team: Union[_models.RedTeam, JSON, IO[bytes]], + *, + foundry_features: Optional[Literal[FoundryFeaturesOptInKeys.RED_TEAMS_V1_PREVIEW]] = None, + **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Is one of the following types: RedTeam, JSON, IO[bytes] + Required. + :type red_team: ~azure.ai.projects.models.RedTeam or JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. RED_TEAMS_V1_PREVIEW. Default value is None. + :paramtype foundry_features: str or ~azure.ai.projects.models.RED_TEAMS_V1_PREVIEW + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(red_team, (IOBase, bytes)): + _content = red_team + else: + _content = json.dumps(red_team, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_red_teams_create_request( + foundry_features=foundry_features, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.RedTeam, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + class SchedulesOperations: """ @@ -8352,16 +8933,16 @@ def get_next(next_link=None): @overload def create_or_update( - self, id: str, schedule: _models.Schedule, *, content_type: str = "application/json", **kwargs: Any + self, id: str, schedule: _models.Schedule, *, content_type: str = "application/merge-patch+json", **kwargs: Any ) -> _models.Schedule: - """Create or update a schedule by id. + """Create or update operation template. :param id: Identifier of the schedule. Required. :type id: str - :param schedule: Schedule resource. Required. + :param schedule: The resource instance. Required. :type schedule: ~azure.ai.projects.models.Schedule :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str :return: Schedule. The Schedule is compatible with MutableMapping :rtype: ~azure.ai.projects.models.Schedule @@ -8370,16 +8951,16 @@ def create_or_update( @overload def create_or_update( - self, id: str, schedule: JSON, *, content_type: str = "application/json", **kwargs: Any + self, id: str, schedule: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any ) -> _models.Schedule: - """Create or update a schedule by id. + """Create or update operation template. :param id: Identifier of the schedule. Required. :type id: str - :param schedule: Schedule resource. Required. + :param schedule: The resource instance. Required. :type schedule: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str :return: Schedule. The Schedule is compatible with MutableMapping :rtype: ~azure.ai.projects.models.Schedule @@ -8388,16 +8969,16 @@ def create_or_update( @overload def create_or_update( - self, id: str, schedule: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + self, id: str, schedule: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any ) -> _models.Schedule: - """Create or update a schedule by id. + """Create or update operation template. :param id: Identifier of the schedule. Required. :type id: str - :param schedule: Schedule resource. Required. + :param schedule: The resource instance. Required. :type schedule: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str :return: Schedule. The Schedule is compatible with MutableMapping :rtype: ~azure.ai.projects.models.Schedule @@ -8408,12 +8989,12 @@ def create_or_update( def create_or_update( self, id: str, schedule: Union[_models.Schedule, JSON, IO[bytes]], **kwargs: Any ) -> _models.Schedule: - """Create or update a schedule by id. + """Create or update operation template. :param id: Identifier of the schedule. Required. :type id: str - :param schedule: Schedule resource. Is one of the following types: Schedule, JSON, IO[bytes] - Required. + :param schedule: The resource instance. Is one of the following types: Schedule, JSON, + IO[bytes] Required. :type schedule: ~azure.ai.projects.models.Schedule or JSON or IO[bytes] :return: Schedule. The Schedule is compatible with MutableMapping :rtype: ~azure.ai.projects.models.Schedule @@ -8433,7 +9014,7 @@ def create_or_update( content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Schedule] = kwargs.pop("cls", None) - content_type = content_type or "application/json" + content_type = content_type or "application/merge-patch+json" _content = None if isinstance(schedule, (IOBase, bytes)): _content = schedule @@ -8469,24 +9050,39 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + if _stream: deserialized = response.iter_bytes() else: deserialized = _deserialize(_models.Schedule, response.json()) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore @distributed_trace - def get_run(self, schedule_id: str, run_id: str, **kwargs: Any) -> _models.ScheduleRun: + def get_run( + self, + schedule_id: str, + run_id: str, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW], + **kwargs: Any + ) -> _models.ScheduleRun: """Get a schedule run by id. - :param schedule_id: Identifier of the schedule. Required. + :param schedule_id: The unique identifier of the schedule. Required. :type schedule_id: str - :param run_id: Identifier of the schedule run. Required. + :param run_id: The unique identifier of the schedule run. Required. :type run_id: str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. INSIGHTS_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.INSIGHTS_V1_PREVIEW :return: ScheduleRun. The ScheduleRun is compatible with MutableMapping :rtype: ~azure.ai.projects.models.ScheduleRun :raises ~azure.core.exceptions.HttpResponseError: @@ -8507,6 +9103,7 @@ def get_run(self, schedule_id: str, run_id: str, **kwargs: Any) -> _models.Sched _request = build_schedules_get_run_request( schedule_id=schedule_id, run_id=run_id, + foundry_features=foundry_features, api_version=self._config.api_version, headers=_headers, params=_params, @@ -8530,7 +9127,11 @@ def get_run(self, schedule_id: str, run_id: str, **kwargs: Any) -> _models.Sched except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) if _stream: deserialized = response.iter_bytes() diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py index ca25e90aa008..ff7ebcc51358 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py @@ -7,12 +7,13 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import Union, Optional, Any, List, overload, IO, cast +from typing import Union, Optional, Any, List, overload, IO, cast, Literal from azure.core.tracing.decorator import distributed_trace from azure.core.polling import NoPolling from azure.core.utils import case_insensitive_dict from .. import models as _models from ..models import ( + FoundryFeaturesOptInKeys, MemoryStoreOperationUsage, ResponseUsageInputTokensDetails, ResponseUsageOutputTokensDetails, @@ -32,6 +33,7 @@ def begin_update_memories( self, name: str, *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], scope: str, content_type: str = "application/json", items: Optional[List[_models.InputItem]] = None, @@ -43,6 +45,9 @@ def begin_update_memories( :param name: The name of the memory store to update. Required. :type name: str + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. Required. :paramtype scope: str @@ -69,7 +74,13 @@ def begin_update_memories( @overload def begin_update_memories( - self, name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + self, + name: str, + body: JSON, + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any, ) -> UpdateMemoriesLROPoller: """Update memory store with conversation memories. @@ -77,6 +88,9 @@ def begin_update_memories( :type name: str :param body: Required. :type body: JSON + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -89,7 +103,13 @@ def begin_update_memories( @overload def begin_update_memories( - self, name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + self, + name: str, + body: IO[bytes], + *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], + content_type: str = "application/json", + **kwargs: Any, ) -> UpdateMemoriesLROPoller: """Update memory store with conversation memories. @@ -97,6 +117,9 @@ def begin_update_memories( :type name: str :param body: Required. :type body: IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -109,15 +132,16 @@ def begin_update_memories( @distributed_trace @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], + method_added_on="v1", + params_added_on={"v1": ["api_version", "name", "content_type", "accept"]}, + api_versions_list=["v1"], ) def begin_update_memories( self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, + foundry_features: Literal[FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW], scope: str = _Unset, items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, @@ -130,6 +154,9 @@ def begin_update_memories( :type name: str :param body: Is either a JSON type or a IO[bytes] type. Required. :type body: JSON or IO[bytes] + :keyword foundry_features: A feature flag opt-in required when using preview operations or + modifying persisted preview resources. MEMORY_STORES_V1_PREVIEW. Required. + :paramtype foundry_features: str or ~azure.ai.projects.models.MEMORY_STORES_V1_PREVIEW :keyword scope: The namespace that logically groups and isolates memories, such as a user ID. Required. :paramtype scope: str @@ -160,6 +187,7 @@ def begin_update_memories( cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._update_memories_initial( + foundry_features=foundry_features, name=name, body=body, scope=scope, diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_ai_project_instrumentor.py b/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_ai_project_instrumentor.py index a55cfa902f82..33673a1d6a7c 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_ai_project_instrumentor.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_ai_project_instrumentor.py @@ -13,10 +13,7 @@ from enum import Enum from typing import Any, Callable, Dict, List, Optional, Tuple, Union, TYPE_CHECKING from urllib.parse import urlparse -from azure.ai.projects.models._models import ( - Tool, - ItemResource, -) +from azure.ai.projects.models._models import Tool from azure.core import CaseInsensitiveEnumMeta # type: ignore from azure.core.settings import settings from azure.core.tracing import AbstractSpan @@ -709,7 +706,7 @@ def start_create_agent_span( # pylint: disable=too-many-locals description: Optional[str] = None, instructions: Optional[str] = None, _tools: Optional[List[Tool]] = None, - _tool_resources: Optional[ItemResource] = None, + _tool_resources: Optional[Any] = None, # TODO: Used to be: _tool_resources: Optional[ItemResource] = None, # _toolset: Optional["ToolSet"] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, @@ -919,7 +916,7 @@ def _create_agent_span_from_parameters( if text: # Handle different types of text objects if hasattr(text, "format"): - # Azure AI Agents PromptAgentDefinitionText model object + # Azure AI Agents PromptAgentDefinitionTextOptions model object format_info = getattr(text, "format", None) if format_info: if hasattr(format_info, "type"): diff --git a/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd b/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd index 6826a3a15591..4530aa4cf4f3 100644 --- a/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd +++ b/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd @@ -1,7 +1,7 @@ REM REM To emit from TypeSpec, run this in the current folder: REM -REM tsp-client update --debug --local-spec-repo e:\src\azure-rest-api-specs-pr\specification\ai\Azure.AI.Projects +REM tsp-client update --debug --local-spec-repo e:\src\azure-rest-api-specs\specification\ai-foundry\data-plane\Foundry REM REM (replace `e:\src\...` with the local folder containing up to date TypeSpec) REM @@ -10,7 +10,8 @@ REM REM Revert this, as we want to keep some edits to these file. git restore pyproject.toml -git restore azure\ai\projects\_version.py +REM Looks like this is no longer needed: +REM git restore azure\ai\projects\_version.py REM Rename "A2_A_PREVIEW" to "A2A_PREVIEW". Since this value is an extension to OpenAI.ToolType enum, we can't use @className in client.tsp to do the rename. powershell -Command "(Get-Content azure\ai\projects\models\_models.py) -replace 'A2_A_PREVIEW', 'A2A_PREVIEW' | Set-Content azure\ai\projects\models\_models.py" @@ -20,14 +21,28 @@ REM Rename `"items_property": items`, to `"items": items` in search_memories and powershell -Command "(Get-Content azure\ai\projects\aio\operations\_operations.py) -replace '\"items_property\": items', '\"items\": items' | Set-Content azure\ai\projects\aio\operations\_operations.py" powershell -Command "(Get-Content azure\ai\projects\operations\_operations.py) -replace '\"items_property\": items', '\"items\": items' | Set-Content azure\ai\projects\operations\_operations.py" +REM Fix Sphinx issue in class ToolChoiceAllowed, in "tools" property doc string. Everything should be aligned including JSON example, like this: +REM """A list of tool definitions that the model should be allowed to call. For the Responses API, the +REM list of tool definitions might look like: +REM .. code-block:: json +REM [ +REM { \"type\": \"function\", \"name\": \"get_weather\" }, +REM { \"type\": \"mcp\", \"server_label\": \"deepwiki\" }, +REM { \"type\": \"image_generation\" } +REM ]. Required.""" + +REM Fix Sphinx issue: docstring of azure.ai.projects.models.WorkflowPreviewActionOutputItem.type:2: WARNING: Duplicate explicit target name: "learn more". [docutils] +REM Turns out this has nothing to do with doc string of class WorkflowPreviewActionOutputItem. Search for "learn more" +REM and change them to "learn more about ..." (e.g. "learn more about content safety"). + REM Fix type annotations by replacing "_types.Filters" with proper union type to fix Pyright errors -powershell -Command "(Get-Content azure\ai\projects\models\_models.py) -replace '\"_types\.Filters\"', 'Union[\"_models.ComparisonFilter\", \"_models.CompoundFilter\"]' | Set-Content azure\ai\projects\models\_models.py" +REM powershell -Command "(Get-Content azure\ai\projects\models\_models.py) -replace '\"_types\.Filters\"', 'Union[\"_models.ComparisonFilter\", \"_models.CompoundFilter\"]' | Set-Content azure\ai\projects\models\_models.py" REM Add additional pylint disables to the model_base.py file -powershell -Command "(Get-Content azure\ai\projects\_utils\model_base.py) -replace '# pylint: disable=protected-access, broad-except', '# pylint: disable=protected-access, broad-except, import-error, no-value-for-parameter' | Set-Content azure\ai\projects\_utils\model_base.py" +REM powershell -Command "(Get-Content azure\ai\projects\_utils\model_base.py) -replace '# pylint: disable=protected-access, broad-except', '# pylint: disable=protected-access, broad-except, import-error, no-value-for-parameter' | Set-Content azure\ai\projects\_utils\model_base.py" REM Add pyright ignore comment to created_by fields to suppress reportIncompatibleVariableOverride errors -powershell -Command "(Get-Content azure\ai\projects\models\_models.py) -replace 'created_by: Optional\[str\] = rest_field\(visibility=\[\"read\", \"create\", \"update\", \"delete\", \"query\"\]\)', 'created_by: Optional[str] = rest_field(visibility=[\"read\", \"create\", \"update\", \"delete\", \"query\"]) # pyright: ignore[reportIncompatibleVariableOverride]' | Set-Content azure\ai\projects\models\_models.py" +REM powershell -Command "(Get-Content azure\ai\projects\models\_models.py) -replace 'created_by: Optional\[str\] = rest_field\(visibility=\[\"read\", \"create\", \"update\", \"delete\", \"query\"\]\)', 'created_by: Optional[str] = rest_field(visibility=[\"read\", \"create\", \"update\", \"delete\", \"query\"]) # pyright: ignore[reportIncompatibleVariableOverride]' | Set-Content azure\ai\projects\models\_models.py" echo Now do these additional changes manually, if you want the "Generate docs" job to succeed in PR pipeline REM Remove `generate_summary` from class `Reasoning`. It's deprecated but causes two types of errors. Consider removing it from TypeSpec. diff --git a/sdk/ai/azure-ai-projects/pyproject.toml b/sdk/ai/azure-ai-projects/pyproject.toml index 7f55115918ef..d5a866a06ce3 100644 --- a/sdk/ai/azure-ai-projects/pyproject.toml +++ b/sdk/ai/azure-ai-projects/pyproject.toml @@ -33,7 +33,7 @@ keywords = ["azure", "azure sdk"] dependencies = [ "isodate>=0.6.1", - "azure-core>=1.36.0", + "azure-core>=1.37.0", "typing-extensions>=4.11", "azure-identity>=1.15.0", "openai>=2.8.0", diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_basic.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_basic.py index a3f87dcdeb81..7ca90a2efad3 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_basic.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_basic.py @@ -17,7 +17,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_basic_async.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_basic_async.py index 5894722ed72d..3c33176da798 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_basic_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_basic_async.py @@ -17,7 +17,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_retrieve_basic.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_retrieve_basic.py index 1102d326b2f3..8d8c190c57d5 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_retrieve_basic.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_retrieve_basic.py @@ -18,7 +18,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_retrieve_basic_async.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_retrieve_basic_async.py index 5cae332aa01f..4a5489db834a 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_retrieve_basic_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_retrieve_basic_async.py @@ -18,7 +18,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" aiohttp python-dotenv + pip install "azure-ai-projects>=2.0.0b4" aiohttp python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_stream_events.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_stream_events.py index 2e90a148bf9c..fa8e31045a55 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_stream_events.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_stream_events.py @@ -18,7 +18,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output.py index 31d84821591e..378d37d29b42 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output.py @@ -22,7 +22,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv pydantic + pip install "azure-ai-projects>=2.0.0b4" python-dotenv pydantic Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -37,7 +37,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - PromptAgentDefinitionText, + PromptAgentDefinitionTextOptions, TextResponseFormatJsonSchema, ) from pydantic import BaseModel, Field @@ -64,7 +64,7 @@ class CalendarEvent(BaseModel): agent_name="MyAgent", definition=PromptAgentDefinition( model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], - text=PromptAgentDefinitionText( + text=PromptAgentDefinitionTextOptions( format=TextResponseFormatJsonSchema(name="CalendarEvent", schema=CalendarEvent.model_json_schema()) ), instructions=""" diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output_async.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output_async.py index beb96d3800a9..c5be9c79a436 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output_async.py @@ -22,7 +22,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp pydantic + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp pydantic Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -38,7 +38,7 @@ from azure.ai.projects.aio import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - PromptAgentDefinitionText, + PromptAgentDefinitionTextOptions, TextResponseFormatJsonSchema, ) from pydantic import BaseModel, Field @@ -65,7 +65,7 @@ async def main() -> None: agent_name="MyAgent", definition=PromptAgentDefinition( model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], - text=PromptAgentDefinitionText( + text=PromptAgentDefinitionTextOptions( format=TextResponseFormatJsonSchema(name="CalendarEvent", schema=CalendarEvent.model_json_schema()) ), instructions=""" diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent.py b/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent.py index 65e51a97aa6c..6fb3d9f8f065 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -29,9 +29,9 @@ from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( + FoundryFeaturesOptInKeys, PromptAgentDefinition, WorkflowAgentDefinition, - ItemResourceType, ) load_dotenv() @@ -138,6 +138,7 @@ workflow = project_client.agents.create_version( agent_name="student-teacher-workflow", definition=WorkflowAgentDefinition(workflow=workflow_yaml), + foundry_features=FoundryFeaturesOptInKeys.WORKFLOW_AGENTS_V1_PREVIEW, ) print(f"Agent created (id: {workflow.id}, name: {workflow.name}, version: {workflow.version})") @@ -150,14 +151,14 @@ extra_body={"agent": {"name": workflow.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=True, - metadata={"x-ms-debug-mode-enabled": "1"}, + # REMOVE ME? metadata={"x-ms-debug-mode-enabled": "1"}, ) for event in stream: print(f"Event {event.sequence_number} type '{event.type}'", end="") if ( event.type == "response.output_item.added" or event.type == "response.output_item.done" - ) and event.item.type == ItemResourceType.WORKFLOW_ACTION: + ) and event.item.type == "workflow_action": print( f": item action ID '{event.item.action_id}' is '{event.item.status}' (previous action ID: '{event.item.previous_action_id}')", end="", diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent_async.py b/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent_async.py index 293fbcf5fa09..db8919c0f426 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent_async.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -32,7 +32,6 @@ from azure.ai.projects.models import ( PromptAgentDefinition, WorkflowAgentDefinition, - ItemResourceType, ) load_dotenv() @@ -152,14 +151,14 @@ async def main(): extra_body={"agent": {"name": workflow.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=True, - metadata={"x-ms-debug-mode-enabled": "1"}, + # Remove me? metadata={"x-ms-debug-mode-enabled": "1"}, ) async for event in stream: print(f"Event {event.sequence_number} type '{event.type}'", end="") if ( event.type == "response.output_item.added" or event.type == "response.output_item.done" - ) and event.item.type == ItemResourceType.WORKFLOW_ACTION: + ) and event.item.type == "workflow_action": print( f": item action ID '{event.item.action_id}' is '{event.item.status}' (previous action ID: '{event.item.previous_action_id}')", end="", diff --git a/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_azure_monitor_tracing.py b/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_azure_monitor_tracing.py index 5ceede581e59..e79fab387673 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_azure_monitor_tracing.py +++ b/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_azure_monitor_tracing.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv azure-monitor-opentelemetry + pip install "azure-ai-projects>=2.0.0b4" python-dotenv azure-monitor-opentelemetry Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_console_tracing.py b/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_console_tracing.py index 85e844cc5c27..5abdba346def 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_console_tracing.py +++ b/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_console_tracing.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv opentelemetry-sdk azure-core-tracing-opentelemetry + pip install "azure-ai-projects>=2.0.0b4" python-dotenv opentelemetry-sdk azure-core-tracing-opentelemetry Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_console_tracing_custom_attributes.py b/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_console_tracing_custom_attributes.py index 0864fcc3f9d9..2cc58950ca0f 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_console_tracing_custom_attributes.py +++ b/sdk/ai/azure-ai-projects/samples/agents/telemetry/sample_agent_basic_with_console_tracing_custom_attributes.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv opentelemetry-sdk azure-core-tracing-opentelemetry + pip install "azure-ai-projects>=2.0.0b4" python-dotenv opentelemetry-sdk azure-core-tracing-opentelemetry Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_ai_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_ai_search.py index 876ed221a2fe..eaa05ad3fda4 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_ai_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_ai_search.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py index ea15b1c331a5..d5bbee261047 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_grounding.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_grounding.py index b09aa64347ee..891250783659 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_grounding.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_grounding.py @@ -22,7 +22,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_browser_automation.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_browser_automation.py index 5156baa55fd3..87d444c6b623 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_browser_automation.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_browser_automation.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter.py index 443f736d5cff..0f0c522bc61f 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter_async.py index 0f8f8eacf6ee..3aafc79626e7 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter_async.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_computer_use.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_computer_use.py index 04c0a336bd39..e439bd388b3b 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_computer_use.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_computer_use.py @@ -20,7 +20,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_computer_use_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_computer_use_async.py index 33d33ef33be6..86ae06ce232f 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_computer_use_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_computer_use_async.py @@ -20,7 +20,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py index 3107cfe87e23..4a7639aaa308 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search.py index 9351a29a8977..43c71427f6e9 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search.py @@ -13,7 +13,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search_in_stream.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search_in_stream.py index 21c04ba22bf6..c1a95213708c 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search_in_stream.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search_in_stream.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search_in_stream_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search_in_stream_async.py index 31280352ac7c..746c83f58837 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search_in_stream_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_file_search_in_stream_async.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_function_tool.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_function_tool.py index cfa1dad9ba7f..8c8a7bc4d281 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_function_tool.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_function_tool.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_function_tool_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_function_tool_async.py index 9a70726fb2cf..4fc0ec2756ce 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_function_tool_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_function_tool_async.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation.py index 3fd58f5aa926..815da33ac701 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation.py @@ -22,7 +22,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation_async.py index 0ad945453783..ae97e3817ccc 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation_async.py @@ -22,7 +22,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp.py index 17fcbe97df6f..f914919afc3f 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_async.py index ddcbc3e4614c..292a22509019 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_async.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_with_project_connection.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_with_project_connection.py index 3534a00ae3eb..f0422331df33 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_with_project_connection.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_with_project_connection.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_with_project_connection_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_with_project_connection_async.py index 2ccad0ca0a03..28072869aea8 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_with_project_connection_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_mcp_with_project_connection_async.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py index dd08e302693d..45160384b886 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py @@ -18,7 +18,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Deploy a chat model (e.g. gpt-4.1) and an embedding model (e.g. text-embedding-3-small). Once you have deployed models, set the deployment name in the variables below. @@ -41,6 +41,7 @@ from azure.core.exceptions import ResourceNotFoundError from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( + FoundryFeaturesOptInKeys, MemoryStoreDefaultDefinition, MemorySearchPreviewTool, PromptAgentDefinition, @@ -60,7 +61,9 @@ # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - project_client.memory_stores.delete(memory_store_name) + project_client.memory_stores.delete( + memory_store_name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -77,6 +80,7 @@ name=memory_store_name, description="Example memory store for conversations", definition=definition, + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") @@ -140,5 +144,7 @@ project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version) print("Agent deleted") - project_client.memory_stores.delete(memory_store.name) + project_client.memory_stores.delete( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print("Memory store deleted") diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py index 4a54942a9f7a..f41ef0e449af 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py @@ -18,7 +18,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Deploy a chat model (e.g. gpt-4.1) and an embedding model (e.g. text-embedding-3-small). Once you have deployed models, set the deployment name in the variables below. @@ -41,6 +41,7 @@ from azure.core.exceptions import ResourceNotFoundError from azure.ai.projects.aio import AIProjectClient from azure.ai.projects.models import ( + FoundryFeaturesOptInKeys, MemoryStoreDefaultDefinition, MemorySearchPreviewTool, PromptAgentDefinition, @@ -63,7 +64,9 @@ async def main() -> None: # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - await project_client.memory_stores.delete(memory_store_name) + await project_client.memory_stores.delete( + memory_store_name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -80,6 +83,7 @@ async def main() -> None: name=memory_store_name, description="Example memory store for conversations", definition=definition, + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") @@ -141,7 +145,9 @@ async def main() -> None: await project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version) print("Agent deleted") - await project_client.memory_stores.delete(memory_store.name) + await project_client.memory_stores.delete( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print("Memory store deleted") diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi.py index 94f71a6b4634..0d2865edcd3a 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv jsonref + pip install "azure-ai-projects>=2.0.0b4" python-dotenv jsonref Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -26,7 +26,7 @@ import os import jsonref from dotenv import load_dotenv - +from typing import Any, cast from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( @@ -50,7 +50,7 @@ # [START tool_declaration] with open(weather_asset_file_path, "r") as f: - openapi_weather = jsonref.loads(f.read()) + openapi_weather = cast(dict[str, Any], jsonref.loads(f.read())) tool = OpenApiTool( openapi=OpenApiFunctionDefinition( diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi_with_project_connection.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi_with_project_connection.py index b1f4e2022321..63cbc730908d 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi_with_project_connection.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi_with_project_connection.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv jsonref + pip install "azure-ai-projects>=2.0.0b4" python-dotenv jsonref Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -29,7 +29,7 @@ import os import jsonref from dotenv import load_dotenv - +from typing import Any, cast from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( @@ -56,7 +56,7 @@ # [START tool_declaration] with open(tripadvisor_asset_file_path, "r") as f: - openapi_tripadvisor = jsonref.loads(f.read()) + openapi_tripadvisor = cast(dict[str, Any], jsonref.loads(f.read())) tool = OpenApiTool( openapi=OpenApiFunctionDefinition( diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_sharepoint.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_sharepoint.py index 4388b73f3f53..dec10b425b54 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_sharepoint.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_sharepoint.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py index b190090f9cf3..87fa94ab0649 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py @@ -16,7 +16,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -68,7 +68,9 @@ ) print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})") - user_input = input("Enter your question (e.g., 'What can the secondary agent do?'): \n") + user_input = os.environ.get("A2A_USER_INPUT") or input( + "Enter your question (e.g., 'What can the secondary agent do?'): \n" + ) stream_response = openai_client.responses.create( stream=True, diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_web_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_web_search.py index 7ef45abec3dd..e522f691c40e 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_web_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_web_search.py @@ -13,7 +13,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -27,7 +27,11 @@ from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient -from azure.ai.projects.models import PromptAgentDefinition, WebSearchPreviewTool, ApproximateLocation +from azure.ai.projects.models import ( + PromptAgentDefinition, + WebSearchTool, + WebSearchApproximateLocation, +) load_dotenv() @@ -40,9 +44,8 @@ project_client.get_openai_client() as openai_client, ): # [START tool_declaration] - tool = WebSearchPreviewTool(user_location=ApproximateLocation(country="GB", city="London", region="London")) + tool = WebSearchTool(user_location=WebSearchApproximateLocation(country="GB", city="London", region="London")) # [END tool_declaration] - # Create Agent with web search tool agent = project_client.agents.create_version( agent_name="MyAgent", @@ -60,13 +63,36 @@ print(f"Created conversation (id: {conversation.id})") # Send a query to search the web - response = openai_client.responses.create( - conversation=conversation.id, - input="Show me the latest London Underground service updates", + user_input = "Show me the latest London Underground service updates" + stream_response = openai_client.responses.create( + stream=True, + input=user_input, + tool_choice="required", extra_body={"agent": {"name": agent.name, "type": "agent_reference"}}, ) - print(f"Agent response: {response.output_text}") + for event in stream_response: + if event.type == "response.created": + print(f"Follow-up response created with ID: {event.response.id}") + elif event.type == "response.output_text.delta": + print(f"Delta: {event.delta}") + elif event.type == "response.text.done": + print(f"\nFollow-up response done!") + elif event.type == "response.output_item.done": + if event.item.type == "message": + item = event.item + if item.content[-1].type == "output_text": + text_content = item.content[-1] + for annotation in text_content.annotations: + if annotation.type == "url_citation": + print( + f"URL Citation: {annotation.url}, " + f"Start index: {annotation.start_index}, " + f"End index: {annotation.end_index}" + ) + elif event.type == "response.completed": + print(f"\nFollow-up completed!") + print(f"Full response: {event.response.output_text}") print("\nCleaning up...") project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version) print("Agent deleted") diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_web_search_preview.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_web_search_preview.py new file mode 100644 index 000000000000..68c955e0f851 --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_web_search_preview.py @@ -0,0 +1,95 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to run Prompt Agent operations + using the Web Search Preview Tool and a synchronous client. + +USAGE: + python sample_agent_web_search_preview.py + + Before running the sample: + + pip install "azure-ai-projects>=2.0.0b4" python-dotenv + + Set these environment variables with your own values: + 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview + page of your Microsoft Foundry portal. + 2) AZURE_AI_MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in + the "Models + endpoints" tab in your Microsoft Foundry project. +""" + +import os +from dotenv import load_dotenv + +from azure.identity import DefaultAzureCredential +from azure.ai.projects import AIProjectClient +from azure.ai.projects.models import PromptAgentDefinition, WebSearchPreviewTool, ApproximateLocation + +load_dotenv() + + +endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"] + +with ( + DefaultAzureCredential() as credential, + AIProjectClient(endpoint=endpoint, credential=credential) as project_client, + project_client.get_openai_client() as openai_client, +): + # [START tool_declaration] + tool = WebSearchPreviewTool(user_location=ApproximateLocation(country="GB", city="London", region="London")) + # [END tool_declaration] + # Create Agent with web search tool + agent = project_client.agents.create_version( + agent_name="MyAgent105", + definition=PromptAgentDefinition( + model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], + instructions="You are a helpful assistant that can search the web", + tools=[tool], + ), + description="Agent for web search.", + ) + print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})") + + # Create a conversation for the agent interaction + conversation = openai_client.conversations.create() + print(f"Created conversation (id: {conversation.id})") + + # Send a query to search the web + user_input = "Show me the latest London Underground service updates" + stream_response = openai_client.responses.create( + stream=True, + input=user_input, + tool_choice="required", + extra_body={"agent": {"name": agent.name, "type": "agent_reference"}}, + ) + + for event in stream_response: + if event.type == "response.created": + print(f"Follow-up response created with ID: {event.response.id}") + elif event.type == "response.output_text.delta": + print(f"Delta: {event.delta}") + elif event.type == "response.text.done": + print(f"\nFollow-up response done!") + elif event.type == "response.output_item.done": + if event.item.type == "message": + item = event.item + if item.content[-1].type == "output_text": + text_content = item.content[-1] + for annotation in text_content.annotations: + if annotation.type == "url_citation": + print( + f"URL Citation: {annotation.url}, " + f"Start index: {annotation.start_index}, " + f"End index: {annotation.end_index}" + ) + elif event.type == "response.completed": + print(f"\nFollow-up completed!") + print(f"Full response: {event.response.output_text}") + + print("\nCleaning up...") + project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version) + print("Agent deleted") diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_web_search_with_custom_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_web_search_with_custom_search.py new file mode 100644 index 000000000000..7bff7f43527d --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_web_search_with_custom_search.py @@ -0,0 +1,111 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Demonstrates Prompt Agent operations that use the Web Search Tool configured + with a Bing Custom Search connection. The agent runs synchronously and + pulls results from your specified custom search instance. + +USAGE: + python sample_agent_web_search_with_custom_search.py + + Before running the sample: + + pip install "azure-ai-projects>=2.0.0b4" python-dotenv + + Set these environment variables with your own values: + 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview + page of your Microsoft Foundry portal. + 2) AZURE_AI_MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in + the "Models + endpoints" tab in your Microsoft Foundry project. + 3) BING_CUSTOM_SEARCH_PROJECT_CONNECTION_ID - The Bing Custom Search project connection ID, + as found in the "Connections" tab in your Microsoft Foundry project. + 4) BING_CUSTOM_SEARCH_INSTANCE_NAME - The Bing Custom Search instance name + 5) BING_CUSTOM_USER_INPUT - (Optional) The question to ask. If not set, you will be prompted. +""" + +import os +from dotenv import load_dotenv + +from azure.identity import DefaultAzureCredential +from azure.ai.projects import AIProjectClient +from azure.ai.projects.models import ( + PromptAgentDefinition, + WebSearchTool, + WebSearchConfiguration, +) + +load_dotenv() + + +endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"] + +with ( + DefaultAzureCredential() as credential, + AIProjectClient(endpoint=endpoint, credential=credential) as project_client, + project_client.get_openai_client() as openai_client, +): + # [START tool_declaration] + tool = WebSearchTool( + custom_search_configuration=WebSearchConfiguration( + project_connection_id=os.environ["BING_CUSTOM_SEARCH_PROJECT_CONNECTION_ID"], + instance_name=os.environ["BING_CUSTOM_SEARCH_INSTANCE_NAME"], + ) + ) + # [END tool_declaration] + # Create Agent with web search tool + agent = project_client.agents.create_version( + agent_name="MyAgent", + definition=PromptAgentDefinition( + model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], + instructions="You are a helpful assistant that can search the web and bing", + tools=[tool], + ), + description="Agent for web search.", + ) + print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})") + + # Create a conversation for the agent interaction + conversation = openai_client.conversations.create() + print(f"Created conversation (id: {conversation.id})") + + user_input = os.environ.get("BING_CUSTOM_USER_INPUT") or input("Enter your question: \n") + + # Send a query to search the web + # Send initial request that will trigger the Bing Custom Search tool + stream_response = openai_client.responses.create( + stream=True, + input=user_input, + tool_choice="required", + extra_body={"agent": {"name": agent.name, "type": "agent_reference"}}, + ) + + for event in stream_response: + if event.type == "response.created": + print(f"Follow-up response created with ID: {event.response.id}") + elif event.type == "response.output_text.delta": + print(f"Delta: {event.delta}") + elif event.type == "response.text.done": + print(f"\nFollow-up response done!") + elif event.type == "response.output_item.done": + if event.item.type == "message": + item = event.item + if item.content[-1].type == "output_text": + text_content = item.content[-1] + for annotation in text_content.annotations: + if annotation.type == "url_citation": + print( + f"URL Citation: {annotation.url}, " + f"Start index: {annotation.start_index}, " + f"End index: {annotation.end_index}" + ) + elif event.type == "response.completed": + print(f"\nFollow-up completed!") + print(f"Full response: {event.response.output_text}") + + print("\nCleaning up...") + project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version) + print("Agent deleted") diff --git a/sdk/ai/azure-ai-projects/samples/connections/sample_connections.py b/sdk/ai/azure-ai-projects/samples/connections/sample_connections.py index 33266e030587..6a075b23b96c 100644 --- a/sdk/ai/azure-ai-projects/samples/connections/sample_connections.py +++ b/sdk/ai/azure-ai-projects/samples/connections/sample_connections.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/connections/sample_connections_async.py b/sdk/ai/azure-ai-projects/samples/connections/sample_connections_async.py index f199e86ba35c..2e115e18aaf2 100644 --- a/sdk/ai/azure-ai-projects/samples/connections/sample_connections_async.py +++ b/sdk/ai/azure-ai-projects/samples/connections/sample_connections_async.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py index 5521931ed21c..a2c2c30c9e0b 100644 --- a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py +++ b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py index 4c36b4518c79..d159fea9e2af 100644 --- a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py +++ b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_download.py b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_download.py index dc701ca81b81..2600f864c365 100644 --- a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_download.py +++ b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_download.py @@ -16,7 +16,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py index 295e713d1846..70a3a981e015 100644 --- a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py +++ b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py @@ -13,7 +13,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py index cd0c322c6fbc..bbc5db10b6d4 100644 --- a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py +++ b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py @@ -13,7 +13,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/README.md b/sdk/ai/azure-ai-projects/samples/evaluations/README.md index ef774d201eaa..6a8897879a9c 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/README.md +++ b/sdk/ai/azure-ai-projects/samples/evaluations/README.md @@ -7,7 +7,7 @@ This folder contains samples demonstrating how to use Azure AI Foundry's evaluat Before running any sample: ```bash -pip install "azure-ai-projects>=2.0.0b1" python-dotenv +pip install "azure-ai-projects>=2.0.0b4" python-dotenv ``` Set these environment variables: diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_coherence.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_coherence.py index 4f3b93813bb4..7cc8638693e5 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_coherence.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_coherence.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_fluency.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_fluency.py index fc51bf844426..cd4dce1ce152 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_fluency.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_fluency.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_generic_agentic_evaluator/sample_generic_agentic_evaluator.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_generic_agentic_evaluator/sample_generic_agentic_evaluator.py index b4cd599e684d..a21b770b777b 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_generic_agentic_evaluator/sample_generic_agentic_evaluator.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_generic_agentic_evaluator/sample_generic_agentic_evaluator.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_groundedness.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_groundedness.py index e59ae572009a..ab22619ee672 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_groundedness.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_groundedness.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_intent_resolution.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_intent_resolution.py index 176733e9894e..97b23e78e02f 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_intent_resolution.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_intent_resolution.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_relevance.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_relevance.py index c6046938fc63..2010cb056409 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_relevance.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_relevance.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_response_completeness.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_response_completeness.py index e63911bfbb9a..e009731db128 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_response_completeness.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_response_completeness.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_adherence.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_adherence.py index 0cbc213e475a..67c2dad71fbb 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_adherence.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_adherence.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_completion.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_completion.py index e91915fd67ad..87804396d05b 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_completion.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_completion.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_navigation_efficiency.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_navigation_efficiency.py index cac84b1c9750..f1f9353d3e77 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_navigation_efficiency.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_task_navigation_efficiency.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_call_accuracy.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_call_accuracy.py index c70261d52d9e..7903ae929512 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_call_accuracy.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_call_accuracy.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_call_success.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_call_success.py index 9e9ed9d042bb..84211c2cf320 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_call_success.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_call_success.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_input_accuracy.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_input_accuracy.py index c6479709cda7..f3d0f6a688a7 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_input_accuracy.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_input_accuracy.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_output_utilization.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_output_utilization.py index 172cb4fa9f3c..6ee61ee717f6 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_output_utilization.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_output_utilization.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_selection.py b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_selection.py index 571f2060c2cc..8ab24c315d0e 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_selection.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/agentic_evaluators/sample_tool_selection.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_evaluation.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_evaluation.py index af587d75b6b7..b2590349893a 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_evaluation.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_evaluation.py @@ -16,7 +16,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_response_evaluation.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_response_evaluation.py index 3ae0c4b4db98..8ea75c3247c5 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_response_evaluation.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_response_evaluation.py @@ -16,7 +16,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_response_evaluation_with_function_tool.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_response_evaluation_with_function_tool.py index 64b7099abf1e..ecc232f0ebcb 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_response_evaluation_with_function_tool.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_agent_response_evaluation_with_function_tool.py @@ -16,7 +16,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_continuous_evaluation_rule.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_continuous_evaluation_rule.py index 9a9972678e0c..f24bc14f208e 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_continuous_evaluation_rule.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_continuous_evaluation_rule.py @@ -25,7 +25,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog.py index f00b953f6c37..b71c67fb17b1 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your @@ -26,6 +26,7 @@ from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( + FoundryFeaturesOptInKeys, EvaluatorVersion, EvaluatorCategory, PromptBasedEvaluatorDefinition, @@ -99,6 +100,7 @@ prompt_evaluator = project_client.evaluators.create_version( name="my_custom_evaluator_code_prompt_based", evaluator_version=evaluator_version, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) pprint(prompt_evaluator) @@ -134,7 +136,9 @@ ), ) code_evaluator = project_client.evaluators.create_version( - name="my_custom_evaluator_code_based", evaluator_version=evaluator_version + name="my_custom_evaluator_code_based", + evaluator_version=evaluator_version, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) pprint(code_evaluator) @@ -142,6 +146,7 @@ code_evaluator_latest = project_client.evaluators.get_version( name=code_evaluator.name, version=code_evaluator.version, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) pprint(code_evaluator_latest) @@ -149,6 +154,7 @@ prompt_evaluator_latest = project_client.evaluators.get_version( name=prompt_evaluator.name, version=prompt_evaluator.version, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) pprint(prompt_evaluator_latest) @@ -161,6 +167,7 @@ "display_name": "my_custom_evaluator_updated", "description": "Custom evaluator description changed", }, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) pprint(updated_evaluator) @@ -168,21 +175,27 @@ project_client.evaluators.delete_version( name=code_evaluator_latest.name, version=code_evaluator_latest.version, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) project_client.evaluators.delete_version( name=prompt_evaluator_latest.name, version=prompt_evaluator_latest.version, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) print("Getting list of builtin evaluator versions") - evaluators = project_client.evaluators.list_latest_versions(type="builtin") + evaluators = project_client.evaluators.list_latest_versions( + type="builtin", foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW + ) print("List of builtin evaluator versions") for evaluator in evaluators: pprint(evaluator) print("Getting list of custom evaluator versions") - evaluators = project_client.evaluators.list_latest_versions(type="custom") + evaluators = project_client.evaluators.list_latest_versions( + type="custom", foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW + ) print("List of custom evaluator versions") for evaluator in evaluators: pprint(evaluator) diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog_code_based_evaluators.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog_code_based_evaluators.py index d4dec7209354..0a312628be61 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog_code_based_evaluators.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog_code_based_evaluators.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your @@ -26,7 +26,7 @@ import os from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient -from azure.ai.projects.models import EvaluatorCategory, EvaluatorDefinitionType +from azure.ai.projects.models import EvaluatorCategory, EvaluatorDefinitionType, FoundryFeaturesOptInKeys from openai.types.evals.create_eval_jsonl_run_data_source_param import ( CreateEvalJSONLRunDataSourceParam, @@ -97,6 +97,7 @@ }, }, }, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) data_source_config = DataSourceConfigCustom( @@ -202,6 +203,7 @@ project_client.evaluators.delete_version( name=code_evaluator.name, version=code_evaluator.version, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) client.evals.delete(eval_id=eval_object.id) diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog_prompt_based_evaluators.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog_prompt_based_evaluators.py index 9c13d93b3d29..1b9b65bbbbc3 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog_prompt_based_evaluators.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_eval_catalog_prompt_based_evaluators.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your @@ -59,7 +59,7 @@ import os from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient -from azure.ai.projects.models import EvaluatorCategory, EvaluatorDefinitionType +from azure.ai.projects.models import EvaluatorCategory, EvaluatorDefinitionType, FoundryFeaturesOptInKeys from openai.types.evals.create_eval_jsonl_run_data_source_param import ( CreateEvalJSONLRunDataSourceParam, @@ -154,6 +154,7 @@ }, }, }, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) pprint(prompt_evaluator) @@ -268,6 +269,7 @@ project_client.evaluators.delete_version( name=prompt_evaluator.name, version=prompt_evaluator.version, + foundry_features=FoundryFeaturesOptInKeys.EVALUATIONS_V1_PREVIEW, ) client.evals.delete(eval_id=eval_object.id) diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluation_cluster_insight.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluation_cluster_insight.py index d0ff775ca3bf..85f5420cc98a 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluation_cluster_insight.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluation_cluster_insight.py @@ -18,7 +18,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -32,8 +32,13 @@ from typing import Union from pprint import pprint from dotenv import load_dotenv -from azure.ai.projects.models._enums import OperationState -from azure.ai.projects.models._models import EvaluationRunClusterInsightsRequest, Insight, InsightModelConfiguration +from azure.ai.projects.models import ( + FoundryFeaturesOptInKeys, + OperationState, + EvaluationRunClusterInsightRequest, + Insight, + InsightModelConfiguration, +) from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from openai.types.eval_create_params import DataSourceConfigCustom, TestingCriterionLabelModel @@ -119,20 +124,23 @@ print(f"Evaluation run result counts: {eval_run.result_counts}") clusterInsight = project_client.insights.generate( - Insight( + insight=Insight( display_name="Cluster analysis", - request=EvaluationRunClusterInsightsRequest( + request=EvaluationRunClusterInsightRequest( eval_id=eval_object.id, run_ids=[eval_run.id], model_configuration=InsightModelConfiguration(model_deployment_name=model_deployment_name), ), - ) + ), + foundry_features=FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW, ) print(f"Started insight generation (id: {clusterInsight.id})") while clusterInsight.state not in [OperationState.SUCCEEDED, OperationState.FAILED]: - print("Waiting for insight to be generated...") - clusterInsight = project_client.insights.get(id=clusterInsight.id) + print(f"Waiting for insight to be generated...") + clusterInsight = project_client.insights.get( + id=clusterInsight.id, foundry_features=FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW + ) print(f"Insight status: {clusterInsight.state}") time.sleep(5) diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluation_compare_insight.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluation_compare_insight.py index 953003de1ce1..fb8f131e4219 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluation_compare_insight.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluation_compare_insight.py @@ -18,7 +18,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -31,8 +31,12 @@ import time from pprint import pprint from dotenv import load_dotenv -from azure.ai.projects.models._enums import OperationState -from azure.ai.projects.models._models import EvaluationComparisonRequest, Insight +from azure.ai.projects.models import ( + FoundryFeaturesOptInKeys, + OperationState, + EvaluationComparisonInsightRequest, + Insight, +) from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from openai.types.eval_create_params import DataSourceConfigCustom, TestingCriterionLabelModel @@ -132,17 +136,20 @@ # Generate comparison insights compareInsight = project_client.insights.generate( - Insight( + insight=Insight( display_name="Comparison of Evaluation Runs", - request=EvaluationComparisonRequest( + request=EvaluationComparisonInsightRequest( eval_id=eval_object.id, baseline_run_id=eval_run_1.id, treatment_run_ids=[eval_run_2.id] ), - ) + ), + foundry_features=FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW, ) print(f"Started insight generation (id: {compareInsight.id})") while compareInsight.state not in [OperationState.SUCCEEDED, OperationState.FAILED]: - compareInsight = project_client.insights.get(id=compareInsight.id) + compareInsight = project_client.insights.get( + id=compareInsight.id, foundry_features=FoundryFeaturesOptInKeys.INSIGHTS_V1_PREVIEW + ) print(f"Waiting for insight to be generated...current status: {compareInsight.state}") time.sleep(5) diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_ai_assisted.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_ai_assisted.py index 4312245c1b12..efa8af0ba58f 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_ai_assisted.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_ai_assisted.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_dataset_id.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_dataset_id.py index 209f26daf171..a1998429e8b0 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_dataset_id.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_dataset_id.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_inline_data.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_inline_data.py index 642ec007ce7e..0ce61c5b6186 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_inline_data.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_inline_data.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_inline_data_oai.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_inline_data_oai.py index ef8a44ba47d4..f69a7c8d19b4 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_inline_data_oai.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_inline_data_oai.py @@ -43,8 +43,7 @@ client = OpenAI( api_key=get_bearer_token_provider(DefaultAzureCredential(), "https://ai.azure.com/.default"), - base_url=os.environ["AZURE_AI_PROJECT_ENDPOINT"].rstrip("/") + "/openai", - default_query={"api-version": "2025-11-15-preview"}, + base_url=os.environ["AZURE_AI_PROJECT_ENDPOINT"].rstrip("/") + "/openai/v1", ) model_deployment_name = os.environ.get("AZURE_AI_MODEL_DEPLOYMENT_NAME", "") # Sample : gpt-4o-mini diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_traces.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_traces.py index 8658a605498f..249705035cb5 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_traces.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_builtin_with_traces.py @@ -16,7 +16,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv azure-monitor-query + pip install "azure-ai-projects>=2.0.0b4" python-dotenv azure-monitor-query Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_graders.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_graders.py index d8c89f72666c..5138132c6bd2 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_graders.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_evaluations_graders.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_model_evaluation.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_model_evaluation.py index 91e45ecf3355..38a4e921d118 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_model_evaluation.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_model_evaluation.py @@ -17,7 +17,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_redteam_evaluations.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_redteam_evaluations.py index 4773ae76102c..29e7f77db6e3 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_redteam_evaluations.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_redteam_evaluations.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/evaluations/sample_scheduled_evaluations.py b/sdk/ai/azure-ai-projects/samples/evaluations/sample_scheduled_evaluations.py index 18ba9481a951..9f301303840e 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluations/sample_scheduled_evaluations.py +++ b/sdk/ai/azure-ai-projects/samples/evaluations/sample_scheduled_evaluations.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv azure-mgmt-authorization azure-mgmt-resource + pip install "azure-ai-projects>=2.0.0b4" python-dotenv azure-mgmt-authorization azure-mgmt-resource Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/files/sample_files.py b/sdk/ai/azure-ai-projects/samples/files/sample_files.py index 7b70bf34c9ea..bb59539c2dac 100644 --- a/sdk/ai/azure-ai-projects/samples/files/sample_files.py +++ b/sdk/ai/azure-ai-projects/samples/files/sample_files.py @@ -14,7 +14,7 @@ Before running the sample: - pip install azure-ai-projects>=2.0.0b1 python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/files/sample_files_async.py b/sdk/ai/azure-ai-projects/samples/files/sample_files_async.py index e417171ed002..0404b2981788 100644 --- a/sdk/ai/azure-ai-projects/samples/files/sample_files_async.py +++ b/sdk/ai/azure-ai-projects/samples/files/sample_files_async.py @@ -14,7 +14,7 @@ Before running the sample: - pip install azure-ai-projects>=2.0.0b1 python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_dpo_job.py b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_dpo_job.py index 6e619bcd84b9..8f7101256308 100644 --- a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_dpo_job.py +++ b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_dpo_job.py @@ -15,7 +15,7 @@ Before running the sample: - pip install azure-ai-projects>=2.0.0b1 python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_dpo_job_async.py b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_dpo_job_async.py index 5b2064b284e1..413d6f743b68 100644 --- a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_dpo_job_async.py +++ b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_dpo_job_async.py @@ -15,7 +15,7 @@ Before running the sample: - pip install azure-ai-projects>=2.0.0b1 python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_oss_models_supervised_job.py b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_oss_models_supervised_job.py index 6e7b4d26cea8..e5ed89eb4654 100644 --- a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_oss_models_supervised_job.py +++ b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_oss_models_supervised_job.py @@ -15,7 +15,7 @@ Before running the sample: - pip install azure-ai-projects>=2.0.0b1 python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_oss_models_supervised_job_async.py b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_oss_models_supervised_job_async.py index 0335e45febe9..cf921bfd9154 100644 --- a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_oss_models_supervised_job_async.py +++ b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_oss_models_supervised_job_async.py @@ -15,7 +15,7 @@ Before running the sample: - pip install azure-ai-projects>=2.0.0b1 python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_reinforcement_job.py b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_reinforcement_job.py index 064f4c36e4d6..79e0cbb95158 100644 --- a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_reinforcement_job.py +++ b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_reinforcement_job.py @@ -15,7 +15,7 @@ Before running the sample: - pip install azure-ai-projects>=2.0.0b1 python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_reinforcement_job_async.py b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_reinforcement_job_async.py index 81cbade07c75..84433bf47419 100644 --- a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_reinforcement_job_async.py +++ b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_reinforcement_job_async.py @@ -15,7 +15,7 @@ Before running the sample: - pip install azure-ai-projects>=2.0.0b1 python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job.py b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job.py index 78131c9b9f8f..a48a613f919b 100644 --- a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job.py +++ b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job.py @@ -18,7 +18,7 @@ Before running the sample: - pip install azure-ai-projects>=2.0.0b1 python-dotenv azure-mgmt-cognitiveservices + pip install "azure-ai-projects>=2.0.0b4" python-dotenv azure-mgmt-cognitiveservices Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job_async.py b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job_async.py index 7ed68304bc17..86d67171429b 100644 --- a/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job_async.py +++ b/sdk/ai/azure-ai-projects/samples/finetuning/sample_finetuning_supervised_job_async.py @@ -18,7 +18,7 @@ Before running the sample: - pip install azure-ai-projects>=2.0.0b1 python-dotenv aiohttp azure-mgmt-cognitiveservices + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp azure-mgmt-cognitiveservices Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes.py b/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes.py index d8429f8d1ff1..9534aae7e309 100644 --- a/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes.py +++ b/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes_async.py b/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes_async.py index ce37089d20d6..24f05fa57266 100644 --- a/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes_async.py +++ b/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes_async.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/mcp_client/sample_mcp_tool_async.py b/sdk/ai/azure-ai-projects/samples/mcp_client/sample_mcp_tool_async.py index de76f3097ceb..216fd54a7057 100644 --- a/sdk/ai/azure-ai-projects/samples/mcp_client/sample_mcp_tool_async.py +++ b/sdk/ai/azure-ai-projects/samples/mcp_client/sample_mcp_tool_async.py @@ -26,7 +26,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv mcp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv mcp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py index 93d2a41866e4..b9d3ab344c6b 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py @@ -18,7 +18,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Deploy a chat model (e.g. gpt-4.1) and an embedding model (e.g. text-embedding-3-small). Once you have deployed models, set the deployment name in the variables below. @@ -39,6 +39,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( EasyInputMessage, + FoundryFeaturesOptInKeys, MemoryStoreDefaultDefinition, MemoryStoreDefaultOptions, MemorySearchOptions, @@ -56,7 +57,9 @@ # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - project_client.memory_stores.delete(memory_store_name) + project_client.memory_stores.delete( + memory_store_name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -76,6 +79,7 @@ name=memory_store_name, description="Example memory store for conversations", definition=definition, + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") @@ -84,12 +88,15 @@ scope = "user_123" # Extract memories from messages and add them to the memory store - user_message = EasyInputMessage(role="user", content="I prefer dark roast coffee and usually drink it in the morning") + user_message = EasyInputMessage( + role="user", content="I prefer dark roast coffee and usually drink it in the morning" + ) update_poller = project_client.memory_stores.begin_update_memories( name=memory_store.name, scope=scope, items=[user_message], # Pass conversation items that you want to add to memory update_delay=300, # Keep default inactivity delay before starting update + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Scheduled memory update operation (Update ID: {update_poller.update_id}, Status: {update_poller.status()})") @@ -101,6 +108,7 @@ items=[new_message], previous_update_id=update_poller.update_id, # Extend from previous update ID update_delay=0, # Trigger update immediately without waiting for inactivity + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print( f"Scheduled memory update operation (Update ID: {new_update_poller.update_id}, Status: {new_update_poller.status()})" @@ -123,14 +131,20 @@ # Retrieve memories from the memory store query_message = EasyInputMessage(role="user", content="What are my morning coffee preferences?") search_response = project_client.memory_stores.search_memories( - name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) + name=memory_store.name, + scope=scope, + items=[query_message], + options=MemorySearchOptions(max_memories=5), + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Found {len(search_response.memories)} memories") for memory in search_response.memories: print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") # Perform another search using the previous search as context - agent_message = EasyInputMessage(role="assistant", content="You previously indicated a preference for dark roast coffee in the morning.") + agent_message = EasyInputMessage( + role="assistant", content="You previously indicated a preference for dark roast coffee in the morning." + ) followup_query = EasyInputMessage(role="user", content="What about afternoon?") followup_search_response = project_client.memory_stores.search_memories( name=memory_store.name, @@ -138,15 +152,20 @@ items=[agent_message, followup_query], previous_search_id=search_response.search_id, options=MemorySearchOptions(max_memories=5), + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Found {len(followup_search_response.memories)} memories") for memory in followup_search_response.memories: print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") # Delete memories for the current scope - project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) + project_client.memory_stores.delete_scope( + name=memory_store.name, scope=scope, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Deleted memories for scope '{scope}'") # Delete memory store - project_client.memory_stores.delete(memory_store.name) + project_client.memory_stores.delete( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Deleted memory store `{memory_store.name}`") diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py index 0f607302208e..28e6e7a9d29f 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py @@ -18,7 +18,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Deploy a chat model (e.g. gpt-4.1) and an embedding model (e.g. text-embedding-3-small). Once you have deployed models, set the deployment name in the variables below. @@ -40,6 +40,7 @@ from azure.ai.projects.aio import AIProjectClient from azure.ai.projects.models import ( EasyInputMessage, + FoundryFeaturesOptInKeys, MemoryStoreDefaultDefinition, MemoryStoreDefaultOptions, MemorySearchOptions, @@ -60,7 +61,9 @@ async def main() -> None: # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - await project_client.memory_stores.delete(memory_store_name) + await project_client.memory_stores.delete( + memory_store_name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -80,6 +83,7 @@ async def main() -> None: name=memory_store_name, description="Example memory store for conversations", definition=definition, + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") @@ -88,12 +92,15 @@ async def main() -> None: scope = "user_123" # Extract memories from messages and add them to the memory store - user_message = EasyInputMessage(role="user", content="I prefer dark roast coffee and usually drink it in the morning") + user_message = EasyInputMessage( + role="user", content="I prefer dark roast coffee and usually drink it in the morning" + ) update_poller = await project_client.memory_stores.begin_update_memories( name=memory_store.name, scope=scope, items=[user_message], # Pass conversation items that you want to add to memory update_delay=300, # Keep default inactivity delay before starting update + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print( f"Scheduled memory update operation (Update ID: {update_poller.update_id}, Status: {update_poller.status()})" @@ -107,6 +114,7 @@ async def main() -> None: items=[new_message], previous_update_id=update_poller.update_id, # Extend from previous update ID update_delay=0, # Trigger update immediately without waiting for inactivity + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print( f"Scheduled memory update operation (Update ID: {new_update_poller.update_id}, Status: {new_update_poller.status()})" @@ -129,14 +137,20 @@ async def main() -> None: # Retrieve memories from the memory store query_message = EasyInputMessage(role="user", content="What are my morning coffee preferences?") search_response = await project_client.memory_stores.search_memories( - name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) + name=memory_store.name, + scope=scope, + items=[query_message], + options=MemorySearchOptions(max_memories=5), + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Found {len(search_response.memories)} memories") for memory in search_response.memories: print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") # Perform another search using the previous search as context - agent_message = EasyInputMessage(role="assistant", content="You previously indicated a preference for dark roast coffee in the morning.") + agent_message = EasyInputMessage( + role="assistant", content="You previously indicated a preference for dark roast coffee in the morning." + ) followup_query = EasyInputMessage(role="user", content="What about afternoon?") followup_search_response = await project_client.memory_stores.search_memories( name=memory_store.name, @@ -144,17 +158,22 @@ async def main() -> None: items=[agent_message, followup_query], previous_search_id=search_response.search_id, options=MemorySearchOptions(max_memories=5), + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Found {len(followup_search_response.memories)} memories") for memory in followup_search_response.memories: print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") # Delete memories for the current scope - await project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) + await project_client.memory_stores.delete_scope( + name=memory_store.name, scope=scope, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Deleted memories for scope '{scope}'") # Delete memory store - await project_client.memory_stores.delete(memory_store.name) + await project_client.memory_stores.delete( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Deleted memory store `{memory_store.name}`") diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py index 3969e8ffb779..95033dc066b3 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py @@ -16,7 +16,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Deploy a chat model (e.g. gpt-4.1) and an embedding model (e.g. text-embedding-3-small). Once you have deployed models, set the deployment name in the variables below. @@ -37,6 +37,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( EasyInputMessage, + FoundryFeaturesOptInKeys, MemoryStoreDefaultDefinition, MemoryStoreDefaultOptions, MemorySearchOptions, @@ -54,7 +55,9 @@ # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - project_client.memory_stores.delete(memory_store_name) + project_client.memory_stores.delete( + memory_store_name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -71,6 +74,7 @@ name=memory_store_name, description="Example memory store for conversations", definition=definition, + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") if isinstance(memory_store.definition, MemoryStoreDefaultDefinition): @@ -82,12 +86,15 @@ scope = "user_123" # Add a memory to the memory store - user_message = EasyInputMessage(role="user", content="I prefer dark roast coffee and usually drink it in the morning") + user_message = EasyInputMessage( + role="user", content="I prefer dark roast coffee and usually drink it in the morning" + ) update_poller = project_client.memory_stores.begin_update_memories( name=memory_store.name, scope=scope, items=[user_message], # Pass conversation items that you want to add to memory update_delay=0, # Trigger update immediately without waiting for inactivity + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) # Wait for the update operation to complete, but can also fire and forget @@ -101,16 +108,24 @@ # Retrieve memories from the memory store query_message = EasyInputMessage(role="user", content="What are my coffee preferences?") search_response = project_client.memory_stores.search_memories( - name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) + name=memory_store.name, + scope=scope, + items=[query_message], + options=MemorySearchOptions(max_memories=5), + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Found {len(search_response.memories)} memories") for memory in search_response.memories: print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") # Delete memories for a specific scope - project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) + project_client.memory_stores.delete_scope( + name=memory_store.name, scope=scope, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Deleted memories for scope '{scope}'") # Delete memory store - project_client.memory_stores.delete(memory_store.name) + project_client.memory_stores.delete( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Deleted memory store `{memory_store.name}`") diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py index 6cf110c3a932..02a66c24f057 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py @@ -17,7 +17,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Deploy a chat model (e.g. gpt-4.1) and an embedding model (e.g. text-embedding-3-small). Once you have deployed models, set the deployment name in the variables below. @@ -39,6 +39,7 @@ from azure.ai.projects.aio import AIProjectClient from azure.ai.projects.models import ( EasyInputMessage, + FoundryFeaturesOptInKeys, MemoryStoreDefaultDefinition, MemoryStoreDefaultOptions, MemorySearchOptions, @@ -59,7 +60,9 @@ async def main() -> None: # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - await project_client.memory_stores.delete(memory_store_name) + await project_client.memory_stores.delete( + memory_store_name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -76,6 +79,7 @@ async def main() -> None: name=memory_store_name, description="Example memory store for conversations", definition=definition, + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") if isinstance(memory_store.definition, MemoryStoreDefaultDefinition): @@ -87,12 +91,15 @@ async def main() -> None: scope = "user_123" # Add a memory to the memory store - user_message = EasyInputMessage(role="user", content="I prefer dark roast coffee and usually drink it in the morning") + user_message = EasyInputMessage( + role="user", content="I prefer dark roast coffee and usually drink it in the morning" + ) update_poller = await project_client.memory_stores.begin_update_memories( name=memory_store.name, scope=scope, items=[user_message], # Pass conversation items that you want to add to memory update_delay=0, # Trigger update immediately without waiting for inactivity + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) # Wait for the update operation to complete, but can also fire and forget @@ -106,18 +113,26 @@ async def main() -> None: # Retrieve memories from the memory store query_message = EasyInputMessage(role="user", content="What are my coffee preferences?") search_response = await project_client.memory_stores.search_memories( - name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) + name=memory_store.name, + scope=scope, + items=[query_message], + options=MemorySearchOptions(max_memories=5), + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Found {len(search_response.memories)} memories") for memory in search_response.memories: print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") # Delete memories for a specific scope - await project_client.memory_stores.delete_scope(name=memory_store.name, scope=scope) + await project_client.memory_stores.delete_scope( + name=memory_store.name, scope=scope, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Deleted memories for scope '{scope}'") # Delete memory store - await project_client.memory_stores.delete(memory_store.name) + await project_client.memory_stores.delete( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Deleted memory store `{memory_store.name}`") diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py index 3101976e7c34..61f33c17458e 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud.py @@ -17,7 +17,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -33,7 +33,7 @@ from azure.core.exceptions import ResourceNotFoundError from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient -from azure.ai.projects.models import MemoryStoreDefaultDefinition +from azure.ai.projects.models import FoundryFeaturesOptInKeys, MemoryStoreDefaultDefinition load_dotenv() @@ -47,7 +47,9 @@ # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - project_client.memory_stores.delete(memory_store_name) + project_client.memory_stores.delete( + memory_store_name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -58,24 +60,37 @@ embedding_model=os.environ["MEMORY_STORE_EMBEDDING_MODEL_DEPLOYMENT_NAME"], ) memory_store = project_client.memory_stores.create( - name=memory_store_name, description="Example memory store for conversations", definition=definition + name=memory_store_name, + description="Example memory store for conversations", + definition=definition, + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") # Get Memory Store - get_store = project_client.memory_stores.get(memory_store.name) + get_store = project_client.memory_stores.get( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Retrieved: {get_store.name} ({get_store.id}): {get_store.description}") # Update Memory Store - updated_store = project_client.memory_stores.update(name=memory_store.name, description="Updated description") + updated_store = project_client.memory_stores.update( + name=memory_store.name, + description="Updated description", + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, + ) print(f"Updated: {updated_store.name} ({updated_store.id}): {updated_store.description}") # List Memory Store - memory_stores = list(project_client.memory_stores.list(limit=10)) + memory_stores = list( + project_client.memory_stores.list(limit=10, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW) + ) print(f"Found {len(memory_stores)} memory stores") for store in memory_stores: print(f" - {store.name} ({store.id}): {store.description}") # Delete Memory Store - delete_response = project_client.memory_stores.delete(memory_store.name) + delete_response = project_client.memory_stores.delete( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Deleted: {delete_response.deleted}") diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py index 6d96c5d23aba..a573b47c2f3a 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_crud_async.py @@ -17,7 +17,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview @@ -34,7 +34,7 @@ from azure.core.exceptions import ResourceNotFoundError from azure.identity.aio import DefaultAzureCredential from azure.ai.projects.aio import AIProjectClient -from azure.ai.projects.models import MemoryStoreDefaultDefinition +from azure.ai.projects.models import FoundryFeaturesOptInKeys, MemoryStoreDefaultDefinition load_dotenv() @@ -51,7 +51,9 @@ async def main() -> None: # Delete memory store, if it already exists memory_store_name = "my_memory_store" try: - await project_client.memory_stores.delete(memory_store_name) + await project_client.memory_stores.delete( + memory_store_name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -62,30 +64,41 @@ async def main() -> None: embedding_model=os.environ["MEMORY_STORE_EMBEDDING_MODEL_DEPLOYMENT_NAME"], ) memory_store = await project_client.memory_stores.create( - name=memory_store_name, description="Example memory store for conversations", definition=definition + name=memory_store_name, + description="Example memory store for conversations", + definition=definition, + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Created memory store: {memory_store.name} ({memory_store.id}): {memory_store.description}") # Get Memory Store - get_store = await project_client.memory_stores.get(memory_store.name) + get_store = await project_client.memory_stores.get( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Retrieved: {get_store.name} ({get_store.id}): {get_store.description}") # Update Memory Store updated_store = await project_client.memory_stores.update( - name=memory_store.name, description="Updated description" + name=memory_store.name, + description="Updated description", + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"Updated: {updated_store.name} ({updated_store.id}): {updated_store.description}") # List Memory Store memory_stores = [] - async for store in project_client.memory_stores.list(limit=10): + async for store in project_client.memory_stores.list( + limit=10, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ): memory_stores.append(store) print(f"Found {len(memory_stores)} memory stores") for store in memory_stores: print(f" - {store.name} ({store.id}): {store.description}") # Delete Memory Store - delete_response = await project_client.memory_stores.delete(memory_store.name) + delete_response = await project_client.memory_stores.delete( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Deleted: {delete_response.deleted}") diff --git a/sdk/ai/azure-ai-projects/samples/red_team/sample_red_team.py b/sdk/ai/azure-ai-projects/samples/red_team/sample_red_team.py index e5e2252501e5..747fd3d91d51 100644 --- a/sdk/ai/azure-ai-projects/samples/red_team/sample_red_team.py +++ b/sdk/ai/azure-ai-projects/samples/red_team/sample_red_team.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/red_team/sample_red_team_async.py b/sdk/ai/azure-ai-projects/samples/red_team/sample_red_team_async.py index 34b4580ee80c..435a088ecce8 100644 --- a/sdk/ai/azure-ai-projects/samples/red_team/sample_red_team_async.py +++ b/sdk/ai/azure-ai-projects/samples/red_team/sample_red_team_async.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic.py b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic.py index 5dd5e86d385e..1bb5d52fcdbf 100644 --- a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic.py +++ b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" openai azure-identity python-dotenv + pip install "azure-ai-projects>=2.0.0b4" openai azure-identity python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_async.py b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_async.py index 590c85480b27..752ddbf40b78 100644 --- a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_async.py +++ b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_async.py @@ -15,7 +15,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_without_aiprojectclient.py b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_without_aiprojectclient.py index 12ca5a99fe68..9c5d8b656bd7 100644 --- a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_without_aiprojectclient.py +++ b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_without_aiprojectclient.py @@ -34,8 +34,7 @@ openai = OpenAI( api_key=get_bearer_token_provider(DefaultAzureCredential(), "https://ai.azure.com/.default"), - base_url=os.environ["AZURE_AI_PROJECT_ENDPOINT"].rstrip("/") + "/openai", - default_query={"api-version": "2025-11-15-preview"}, + base_url=os.environ["AZURE_AI_PROJECT_ENDPOINT"].rstrip("/") + "/openai/v1", ) response = openai.responses.create( diff --git a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_without_aiprojectclient_async.py b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_without_aiprojectclient_async.py index 4eb408389364..8a2934ff7418 100644 --- a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_without_aiprojectclient_async.py +++ b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_basic_without_aiprojectclient_async.py @@ -42,8 +42,7 @@ async def main() -> None: openai = AsyncOpenAI( api_key=get_bearer_token_provider(credential, "https://ai.azure.com/.default"), - base_url=os.environ["AZURE_AI_PROJECT_ENDPOINT"].rstrip("/") + "/openai", - default_query={"api-version": "2025-11-15-preview"}, + base_url=os.environ["AZURE_AI_PROJECT_ENDPOINT"].rstrip("/") + "/openai/v1", ) async with openai: diff --git a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_image_input.py b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_image_input.py index d75a83c8feaf..ffede4ef3b58 100644 --- a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_image_input.py +++ b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_image_input.py @@ -16,7 +16,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_stream_events.py b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_stream_events.py index 6c4ab37c17a7..7068a2fd24f5 100644 --- a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_stream_events.py +++ b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_stream_events.py @@ -17,7 +17,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_stream_manager.py b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_stream_manager.py index 31368d285576..b3ab4ecac13f 100644 --- a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_stream_manager.py +++ b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_stream_manager.py @@ -17,7 +17,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_structured_output.py b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_structured_output.py index e45bbdd5f8a5..0e081799afb0 100644 --- a/sdk/ai/azure-ai-projects/samples/responses/sample_responses_structured_output.py +++ b/sdk/ai/azure-ai-projects/samples/responses/sample_responses_structured_output.py @@ -17,7 +17,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview diff --git a/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry.py b/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry.py index 697a2e78d614..cf35d6d3cad7 100644 --- a/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry.py +++ b/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv + pip install "azure-ai-projects>=2.0.0b4" python-dotenv Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry_async.py b/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry_async.py index bb5a5c55dac4..5c53760c58f6 100644 --- a/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry_async.py +++ b/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry_async.py @@ -14,7 +14,7 @@ Before running the sample: - pip install "azure-ai-projects>=2.0.0b1" python-dotenv aiohttp + pip install "azure-ai-projects>=2.0.0b4" python-dotenv aiohttp Set these environment variables with your own values: 1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor.py index b5e62189aede..e216e6ff22cb 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor.py @@ -10,7 +10,7 @@ from azure.ai.projects.telemetry import AIProjectInstrumentor, _utils from azure.core.settings import settings from gen_ai_trace_verifier import GenAiTraceVerifier -from azure.ai.projects.models import PromptAgentDefinition, PromptAgentDefinitionText +from azure.ai.projects.models import PromptAgentDefinition, PromptAgentDefinitionTextOptions from azure.ai.projects.models import ( Reasoning, @@ -61,6 +61,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAiAgentsInstrumentor(TestAiAgentsInstrumentorBase): """Tests for AI agents instrumentor.""" @@ -555,7 +558,7 @@ def _test_agent_with_structured_output_with_instructions_impl( agent_definition = PromptAgentDefinition( model=model, instructions="You are a helpful assistant that extracts person information.", - text=PromptAgentDefinitionText( + text=PromptAgentDefinitionTextOptions( format=TextResponseFormatJsonSchema( name="PersonInfo", schema=test_schema, @@ -737,7 +740,7 @@ def _test_agent_with_structured_output_without_instructions_impl( agent_definition = PromptAgentDefinition( model=model, - text=PromptAgentDefinitionText( + text=PromptAgentDefinitionTextOptions( format=TextResponseFormatJsonSchema( name="Result", schema=test_schema, diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor_async.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor_async.py index bda4cd2aba04..07dfe2d8f84a 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor_async.py @@ -9,7 +9,7 @@ from azure.ai.projects.telemetry import AIProjectInstrumentor, _utils from azure.core.settings import settings from gen_ai_trace_verifier import GenAiTraceVerifier -from azure.ai.projects.models import PromptAgentDefinition, PromptAgentDefinitionText +from azure.ai.projects.models import PromptAgentDefinition, PromptAgentDefinitionTextOptions from azure.ai.projects.models import ( Reasoning, FunctionTool, @@ -58,6 +58,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAiAgentsInstrumentor(TestAiAgentsInstrumentorBase): """Tests for AI agents instrumentor.""" @@ -424,7 +427,7 @@ async def _test_agent_with_structured_output_with_instructions_impl( agent_definition = PromptAgentDefinition( model=model, instructions="You are a helpful assistant that extracts person information.", - text=PromptAgentDefinitionText( + text=PromptAgentDefinitionTextOptions( format=TextResponseFormatJsonSchema( name="PersonInfo", schema=test_schema, @@ -609,7 +612,7 @@ async def _test_agent_with_structured_output_without_instructions_impl( agent_definition = PromptAgentDefinition( model=model, # No instructions provided - text=PromptAgentDefinitionText( + text=PromptAgentDefinitionTextOptions( format=TextResponseFormatJsonSchema( name="Result", schema=test_schema, diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_instrumentor_base.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_instrumentor_base.py index 55dff7d9a421..8e9e580de5e5 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_instrumentor_base.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_instrumentor_base.py @@ -31,6 +31,9 @@ class MessageCreationMode(IntEnum): ) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAiAgentsInstrumentorBase(TestBase): """The utility methods, used by AI Instrumentor test.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor.py index b70c801f7bd7..6b535576da17 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor.py @@ -52,6 +52,9 @@ ) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentor(TestAiAgentsInstrumentorBase): """Tests for ResponsesInstrumentor with real endpoints.""" @@ -4654,7 +4657,6 @@ def test_workflow_agent_non_streaming_with_content_recording(self, **kwargs): """Test workflow agent with non-streaming and content recording enabled.""" from azure.ai.projects.models import ( WorkflowAgentDefinition, - AgentReference, PromptAgentDefinition, ) @@ -4771,7 +4773,7 @@ def test_workflow_agent_non_streaming_with_content_recording(self, **kwargs): response = openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow_agent.name).as_dict()}, + extra_body={"agent": {"name": workflow_agent.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=False, ) @@ -4855,7 +4857,7 @@ def test_workflow_agent_non_streaming_with_content_recording(self, **kwargs): @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) def test_workflow_agent_non_streaming_without_content_recording(self, **kwargs): """Test workflow agent with non-streaming and content recording disabled.""" - from azure.ai.projects.models import WorkflowAgentDefinition, AgentReference + from azure.ai.projects.models import WorkflowAgentDefinition self.cleanup() os.environ.update( @@ -4891,7 +4893,7 @@ def test_workflow_agent_non_streaming_without_content_recording(self, **kwargs): response = openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow_agent.name).as_dict()}, + extra_body={"agent": {"name": workflow_agent.name, "type": "agent_reference"}}, input="Test workflow", stream=False, ) @@ -4971,7 +4973,6 @@ def test_workflow_agent_streaming_with_content_recording(self, **kwargs): """Test workflow agent with streaming and content recording enabled.""" from azure.ai.projects.models import ( WorkflowAgentDefinition, - AgentReference, PromptAgentDefinition, ) @@ -5088,7 +5089,7 @@ def test_workflow_agent_streaming_with_content_recording(self, **kwargs): stream = openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow_agent.name).as_dict()}, + extra_body={"agent": {"name": workflow_agent.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=True, ) @@ -5175,7 +5176,7 @@ def test_workflow_agent_streaming_with_content_recording(self, **kwargs): @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) def test_workflow_agent_streaming_without_content_recording(self, **kwargs): """Test workflow agent with streaming and content recording disabled.""" - from azure.ai.projects.models import WorkflowAgentDefinition, AgentReference + from azure.ai.projects.models import WorkflowAgentDefinition self.cleanup() os.environ.update( @@ -5211,7 +5212,7 @@ def test_workflow_agent_streaming_without_content_recording(self, **kwargs): stream = openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow_agent.name).as_dict()}, + extra_body={"agent": {"name": workflow_agent.name, "type": "agent_reference"}}, input="Test workflow", stream=True, ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_async.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_async.py index be30ffd057bf..9c295e0f0772 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_async.py @@ -36,6 +36,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentor(TestAiAgentsInstrumentorBase): """Tests for ResponsesInstrumentor with real endpoints (async).""" @@ -2948,7 +2951,7 @@ async def test_async_responses_stream_method_with_tools_without_content_recordin @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) async def test_async_workflow_agent_non_streaming_with_content_recording(self, **kwargs): """Test async workflow agent with non-streaming and content recording enabled.""" - from azure.ai.projects.models import WorkflowAgentDefinition, AgentReference + from azure.ai.projects.models import WorkflowAgentDefinition self.cleanup() os.environ.update( @@ -2986,7 +2989,7 @@ async def test_async_workflow_agent_non_streaming_with_content_recording(self, * response = await openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow_agent.name).as_dict()}, + extra_body={"agent": {"name": workflow_agent.name, "type": "agent_reference"}}, input="Test workflow", stream=False, ) @@ -3065,7 +3068,7 @@ async def test_async_workflow_agent_non_streaming_with_content_recording(self, * @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) async def test_async_workflow_agent_non_streaming_without_content_recording(self, **kwargs): """Test async workflow agent with non-streaming and content recording disabled.""" - from azure.ai.projects.models import WorkflowAgentDefinition, AgentReference + from azure.ai.projects.models import WorkflowAgentDefinition self.cleanup() os.environ.update( @@ -3102,7 +3105,7 @@ async def test_async_workflow_agent_non_streaming_without_content_recording(self response = await openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow_agent.name).as_dict()}, + extra_body={"agent": {"name": workflow_agent.name, "type": "agent_reference"}}, input="Test workflow", stream=False, ) @@ -3188,7 +3191,7 @@ async def test_async_workflow_agent_non_streaming_without_content_recording(self @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) async def test_async_workflow_agent_streaming_with_content_recording(self, **kwargs): """Test async workflow agent with streaming and content recording enabled.""" - from azure.ai.projects.models import WorkflowAgentDefinition, AgentReference + from azure.ai.projects.models import WorkflowAgentDefinition self.cleanup() os.environ.update( @@ -3225,7 +3228,7 @@ async def test_async_workflow_agent_streaming_with_content_recording(self, **kwa stream = await openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow_agent.name).as_dict()}, + extra_body={"agent": {"name": workflow_agent.name, "type": "agent_reference"}}, input="Test workflow", stream=True, ) @@ -3309,7 +3312,7 @@ async def test_async_workflow_agent_streaming_with_content_recording(self, **kwa @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) async def test_async_workflow_agent_streaming_without_content_recording(self, **kwargs): """Test async workflow agent with streaming and content recording disabled.""" - from azure.ai.projects.models import WorkflowAgentDefinition, AgentReference + from azure.ai.projects.models import WorkflowAgentDefinition self.cleanup() os.environ.update( @@ -3346,7 +3349,7 @@ async def test_async_workflow_agent_streaming_without_content_recording(self, ** stream = await openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow_agent.name).as_dict()}, + extra_body={"agent": {"name": workflow_agent.name, "type": "agent_reference"}}, input="Test workflow", stream=True, ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_browser_automation.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_browser_automation.py index 323a43dd0c6f..50a74dbeb08a 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_browser_automation.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_browser_automation.py @@ -30,6 +30,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorBrowserAutomation(TestAiAgentsInstrumentorBase): """Tests for ResponsesInstrumentor with browser automation agents.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_browser_automation_async.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_browser_automation_async.py index 87b89d8343f6..d15c9346f77b 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_browser_automation_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_browser_automation_async.py @@ -33,6 +33,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorBrowserAutomationAsync(TestAiAgentsInstrumentorBase): """Async tests for ResponsesInstrumentor with browser automation agents.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_code_interpreter.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_code_interpreter.py index df073393af8c..28d496482dad 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_code_interpreter.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_code_interpreter.py @@ -35,6 +35,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorCodeInterpreter(TestAiAgentsInstrumentorBase): """ Test suite for Code Interpreter agent telemetry instrumentation. diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_code_interpreter_async.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_code_interpreter_async.py index 850a2eb6e542..6881f72d160a 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_code_interpreter_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_code_interpreter_async.py @@ -36,6 +36,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorCodeInterpreterAsync(TestAiAgentsInstrumentorBase): """ Test suite for Code Interpreter agent telemetry instrumentation (async). diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_file_search.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_file_search.py index 773f7c60614f..f60a732ebfdf 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_file_search.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_file_search.py @@ -31,6 +31,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorFileSearch(TestAiAgentsInstrumentorBase): """Tests for ResponsesInstrumentor with File Search tool.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_file_search_async.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_file_search_async.py index 4e3e94308413..1fb973211813 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_file_search_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_file_search_async.py @@ -32,6 +32,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorFileSearchAsync(TestAiAgentsInstrumentorBase): """Async tests for ResponsesInstrumentor with File Search tool.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_mcp.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_mcp.py index a0640b93e62d..36b1cec4c98c 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_mcp.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_mcp.py @@ -31,6 +31,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorMCP(TestAiAgentsInstrumentorBase): """Tests for ResponsesInstrumentor with MCP agents.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_mcp_async.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_mcp_async.py index d0d4fb384984..2db3a44ac344 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_mcp_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_mcp_async.py @@ -32,6 +32,9 @@ _utils._span_impl_type = settings.tracing_implementation() +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorMCPAsync(TestAiAgentsInstrumentorBase): """Async tests for ResponsesInstrumentor with MCP agents.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_metrics.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_metrics.py index a198327679c3..4a285d42482b 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_metrics.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_metrics.py @@ -29,6 +29,9 @@ metrics.set_meter_provider(global_meter_provider) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorMetrics(TestAiAgentsInstrumentorBase): """Tests for ResponsesInstrumentor metrics functionality with real endpoints.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_workflow.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_workflow.py index 5d9254d23c80..afa4428cb80e 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_workflow.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_workflow.py @@ -19,7 +19,6 @@ from gen_ai_trace_verifier import GenAiTraceVerifier from devtools_testutils import recorded_by_proxy, RecordedTransport from azure.ai.projects.models import ( - AgentReference, PromptAgentDefinition, WorkflowAgentDefinition, ) @@ -105,6 +104,9 @@ def checkInputMessageEventContents(content, content_recording_enabled): assert found_text, "No text part found in input message event" +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorWorkflow(TestAiAgentsInstrumentorBase): """Tests for ResponsesInstrumentor with workflow agents.""" @@ -243,10 +245,10 @@ def test_sync_workflow_non_streaming_with_content_recording(self, **kwargs): # Non-streaming request response = openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow.name).as_dict()}, + extra_body={"agent": {"name": workflow.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=False, - metadata={"x-ms-debug-mode-enabled": "1"}, + # Remove me? metadata={"x-ms-debug-mode-enabled": "1"}, ) # Verify response has output @@ -408,10 +410,10 @@ def test_sync_workflow_non_streaming_without_content_recording(self, **kwargs): # Non-streaming request response = openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow.name).as_dict()}, + extra_body={"agent": {"name": workflow.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=False, - metadata={"x-ms-debug-mode-enabled": "1"}, + # Remove me? metadata={"x-ms-debug-mode-enabled": "1"}, ) # Verify response has output @@ -575,10 +577,10 @@ def test_sync_workflow_streaming_with_content_recording(self, **kwargs): # Streaming request stream = openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow.name).as_dict()}, + extra_body={"agent": {"name": workflow.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=True, - metadata={"x-ms-debug-mode-enabled": "1"}, + # Remove me? metadata={"x-ms-debug-mode-enabled": "1"}, ) # Consume stream @@ -743,10 +745,10 @@ def test_sync_workflow_streaming_without_content_recording(self, **kwargs): # Streaming request stream = openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow.name).as_dict()}, + extra_body={"agent": {"name": workflow.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=True, - metadata={"x-ms-debug-mode-enabled": "1"}, + # Remove me? metadata={"x-ms-debug-mode-enabled": "1"}, ) # Consume stream diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_workflow_async.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_workflow_async.py index 494c29e71fd3..c29871c62aa6 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_workflow_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor_workflow_async.py @@ -20,7 +20,6 @@ from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils import RecordedTransport from azure.ai.projects.models import ( - AgentReference, PromptAgentDefinition, WorkflowAgentDefinition, ) @@ -104,6 +103,9 @@ def checkInputMessageEventContents(content, content_recording_enabled): assert found_text, "No text part found in input message event" +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestResponsesInstrumentorWorkflowAsync(TestAiAgentsInstrumentorBase): """Async tests for ResponsesInstrumentor with workflow agents.""" @@ -242,10 +244,10 @@ async def test_async_workflow_non_streaming_with_content_recording(self, **kwarg # Non-streaming request response = await openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow.name).as_dict()}, + extra_body={"agent": {"name": workflow.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=False, - metadata={"x-ms-debug-mode-enabled": "1"}, + # Remove me? metadata={"x-ms-debug-mode-enabled": "1"}, ) # Verify response has output @@ -403,10 +405,10 @@ async def test_async_workflow_non_streaming_without_content_recording(self, **kw # Non-streaming request response = await openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow.name).as_dict()}, + extra_body={"agent": {"name": workflow.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=False, - metadata={"x-ms-debug-mode-enabled": "1"}, + # Remove me? metadata={"x-ms-debug-mode-enabled": "1"}, ) # Verify response has output @@ -568,10 +570,10 @@ async def test_async_workflow_streaming_with_content_recording(self, **kwargs): # Streaming request stream = await openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow.name).as_dict()}, + extra_body={"agent": {"name": workflow.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=True, - metadata={"x-ms-debug-mode-enabled": "1"}, + # Remove me? metadata={"x-ms-debug-mode-enabled": "1"}, ) # Consume stream @@ -734,10 +736,10 @@ async def test_async_workflow_streaming_without_content_recording(self, **kwargs # Streaming request stream = await openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=workflow.name).as_dict()}, + extra_body={"agent": {"name": workflow.name, "type": "agent_reference"}}, input="1 + 1 = ?", stream=True, - metadata={"x-ms-debug-mode-enabled": "1"}, + # Remove me? metadata={"x-ms-debug-mode-enabled": "1"}, ) # Consume stream diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_trace_function_decorator.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_trace_function_decorator.py index 18a96fd8eae3..af3838e205fb 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_trace_function_decorator.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_trace_function_decorator.py @@ -12,6 +12,9 @@ from memory_trace_exporter import MemoryTraceExporter +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestTraceFunctionDecorator: """Tests for trace_function decorator with synchronous functions.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_trace_function_decorator_async.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_trace_function_decorator_async.py index dd7d92bed63c..a7f671e20691 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_trace_function_decorator_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_trace_function_decorator_async.py @@ -12,6 +12,9 @@ from memory_trace_exporter import MemoryTraceExporter +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestTraceFunctionDecoratorAsync: """Tests for trace_function decorator with asynchronous functions.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud.py index 98f4ada367df..d8ddaf437b5a 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud.py @@ -11,10 +11,14 @@ from azure.ai.projects.models import ( PromptAgentDefinition, TextResponseFormatJsonSchema, - PromptAgentDefinitionText, + PromptAgentDefinitionTextOptions, ) +import pytest +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentResponsesCrud(TestBase): # To run this test: @@ -137,7 +141,7 @@ def test_agent_responses_crud(self, **kwargs): # response = openai_client.responses.create( # conversation=conversation.id, - # extra_body={"agent": AgentReference(name=agent.name).as_dict()} + # extra_body={"agent": {"name": agent.name, "type": "agent_reference"}} # ) # print(f"Response id: {response.id}, output text: {response.output_text}") @@ -175,7 +179,7 @@ class CalendarEvent(BaseModel): agent_name="MyAgent", definition=PromptAgentDefinition( model=model, - text=PromptAgentDefinitionText( + text=PromptAgentDefinitionTextOptions( format=TextResponseFormatJsonSchema(name="CalendarEvent", schema=CalendarEvent.model_json_schema()) ), instructions=""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud_async.py index 9a73410e6821..5d816cce67ce 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud_async.py @@ -12,10 +12,14 @@ from azure.ai.projects.models import ( PromptAgentDefinition, TextResponseFormatJsonSchema, - PromptAgentDefinitionText, + PromptAgentDefinitionTextOptions, ) +import pytest +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentResponsesCrudAsync(TestBase): @servicePreparer() @@ -108,7 +112,7 @@ async def test_agent_responses_crud_async(self, **kwargs): # response = await project_client.agents.responses.create( # conversation=conversation.id, - # extra_body={"agent": AgentReference(name=agent.name).as_dict()} + # extra_body={"agent": {"name": agent.name, "type": "agent_reference"}} # ) # print(f"Response id: {response.id}, output text: {response.output_text}") @@ -148,7 +152,7 @@ class CalendarEvent(BaseModel): agent_name="MyAgent", definition=PromptAgentDefinition( model=model, - text=PromptAgentDefinitionText( + text=PromptAgentDefinitionTextOptions( format=TextResponseFormatJsonSchema( name="CalendarEvent", schema=CalendarEvent.model_json_schema() ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agents_crud.py b/sdk/ai/azure-ai-projects/tests/agents/test_agents_crud.py index 56f87bc64d01..d2d0714275d0 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agents_crud.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agents_crud.py @@ -9,8 +9,12 @@ from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy from azure.ai.projects.models import PromptAgentDefinition, AgentDetails, AgentVersionDetails +import pytest +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentCrud(TestBase): # To run this test: diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agents_crud_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_agents_crud_async.py index bd03ed0299ab..3f2679f6eba0 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agents_crud_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agents_crud_async.py @@ -9,8 +9,12 @@ from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async from azure.ai.projects.models import PromptAgentDefinition, AgentDetails, AgentVersionDetails +import pytest +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentCrudAsync(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_container_app_agents.py b/sdk/ai/azure-ai-projects/tests/agents/test_container_app_agents.py index 1b304d401dd1..c847c1923899 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_container_app_agents.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_container_app_agents.py @@ -11,9 +11,12 @@ from devtools_testutils import is_live_and_not_recording # from azure.ai.projects.models import ResponsesUserMessageItemParam -from azure.ai.projects.models import AgentReference, ContainerAppAgentDefinition, ProtocolVersionRecord, AgentProtocol +from azure.ai.projects.models import ContainerAppAgentDefinition, ProtocolVersionRecord, AgentProtocol +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestContainerAppAgents(TestBase): @servicePreparer() @@ -49,7 +52,7 @@ def test_container_app_agent(self, **kwargs): try: response = openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=agent_version.name).as_dict()}, + extra_body={"agent": {"name": agent_version.name, "type": "agent_reference"}}, ) print(f"Response id: {response.id}, output text: {response.output_text}") assert "5280" in response.output_text or "5,280" in response.output_text diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_container_app_agents_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_container_app_agents_async.py index 27a163db1757..bf4b150db65c 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_container_app_agents_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_container_app_agents_async.py @@ -11,9 +11,12 @@ from devtools_testutils import is_live_and_not_recording # from azure.ai.projects.models import ResponsesUserMessageItemParam -from azure.ai.projects.models import AgentReference, ContainerAppAgentDefinition, ProtocolVersionRecord, AgentProtocol +from azure.ai.projects.models import ContainerAppAgentDefinition, ProtocolVersionRecord, AgentProtocol +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestContainerAppAgentsAsync(TestBase): @servicePreparer() @@ -49,7 +52,7 @@ async def test_container_app_agent_async(self, **kwargs): try: response = await openai_client.responses.create( conversation=conversation.id, - extra_body={"agent": AgentReference(name=agent_version.name).as_dict()}, + extra_body={"agent": {"name": agent_version.name, "type": "agent_reference"}}, ) print(f"Response id: {response.id}, output text: {response.output_text}") assert "5280" in response.output_text or "5,280" in response.output_text diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_crud.py b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_crud.py index 636d1a012e51..83a6cc0dcc69 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_crud.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_crud.py @@ -7,11 +7,15 @@ from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy, RecordedTransport +import pytest # from azure.ai.projects.models import ResponsesUserMessageItemParam, ItemContentInputText # TODO: Emitter did not produce the output class OpenAI.ConversationResource. Validating service response as Dict for now. +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestConversationCrud(TestBase): # To run only this test: diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_crud_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_crud_async.py index 9905b34b5c16..29520fabc727 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_crud_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_crud_async.py @@ -8,10 +8,14 @@ from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils import RecordedTransport +import pytest # from azure.ai.projects.models import ResponsesUserMessageItemParam, ItemContentInputText +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestConversationCrudAsync(TestBase): # To run only this test: diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud.py b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud.py index 95aff3fac063..c510b7210ce2 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud.py @@ -7,8 +7,12 @@ from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy, RecordedTransport +import pytest +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestConversationItemsCrud(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud_async.py index f6b0379aeab8..703154357d1a 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud_async.py @@ -8,8 +8,12 @@ from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils import RecordedTransport +import pytest +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestConversationItemsCrudAsync(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_hosted_agents.py b/sdk/ai/azure-ai-projects/tests/agents/test_hosted_agents.py index 9959c9df10fd..11b2012cc7b7 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_hosted_agents.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_hosted_agents.py @@ -6,39 +6,17 @@ # cSpell:disable from test_base import TestBase # , servicePreparer +import pytest # from devtools_testutils import recorded_by_proxy -# from azure.ai.projects.models import AgentReference, PromptAgentDefinition +# from azure.ai.projects.models import PromptAgentDefinition +@pytest.mark.skip(reason="Not yet implemented") class TestHostedAgents(TestBase): # @servicePreparer() # @recorded_by_proxy def test_hosted_agent(self, **kwargs): - """ - Test Hosted Agents and all container operations. - - Routes used in this test: - - Action REST API Route Client Method - ------+---------------------------------------------------------------------------+----------------------------------- - - # Setup: - - # Test focus: - GET /agents/{agent_name}/operations list_container_operations - GET /agents/{agent_name}/operations/{operation_id} retrieve_container_operation - GET /agents/{agent_name}/versions/{agent_version}/containers/default retrieve_container - GET /agents/{agent_name}/versions/{agent_version}/containers/default/operations list_version_container_operations - POST /agents/{agent_name}/versions/{agent_version}/containers/default:start start_container - POST /agents/{agent_name}/versions/{agent_version}/containers/default:stop stop_container - POST /agents/{agent_name}/versions/{agent_version}/containers/default:update update_container - POST /agents/{agent_name}/versions/{agent_version}/containers/default:delete delete_container - - # Teardown: - - """ - # TODO: Add tests! pass diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_code_interpreter_and_function.py b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_code_interpreter_and_function.py index 2601424c4a1b..58d0163f3e14 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_code_interpreter_and_function.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_code_interpreter_and_function.py @@ -5,6 +5,8 @@ # ------------------------------------ # cSpell:disable +import pytest + """ Multi-Tool Tests: Code Interpreter + Function Tool @@ -24,6 +26,9 @@ from openai.types.responses.response_input_param import FunctionCallOutput, ResponseInputParam +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentCodeInterpreterAndFunction(TestBase): """Tests for agents using Code Interpreter + Function Tool combination.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_code_interpreter.py b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_code_interpreter.py index a60340a6571b..b5c1ada0057f 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_code_interpreter.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_code_interpreter.py @@ -5,6 +5,8 @@ # ------------------------------------ # cSpell:disable +import pytest + """ Multi-Tool Tests: File Search + Code Interpreter @@ -23,6 +25,9 @@ ) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentFileSearchAndCodeInterpreter(TestBase): """Tests for agents using File Search + Code Interpreter combination.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_function.py b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_function.py index 6bf4a5e9a5a9..fb7f74c537e9 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_function.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_function.py @@ -5,6 +5,8 @@ # ------------------------------------ # cSpell:disable +import pytest + """ Multi-Tool Tests: File Search + Function Tool @@ -20,6 +22,9 @@ from openai.types.responses.response_input_param import FunctionCallOutput, ResponseInputParam +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentFileSearchAndFunction(TestBase): """Tests for agents using File Search + Function Tool combination.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_code_interpreter_function.py b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_code_interpreter_function.py index 1b4bd4462be5..29d7caa9b412 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_code_interpreter_function.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_code_interpreter_function.py @@ -5,6 +5,8 @@ # ------------------------------------ # cSpell:disable +import pytest + """ Multi-Tool Tests: File Search + Code Interpreter + Function Tool @@ -26,6 +28,9 @@ from openai.types.responses.response_input_param import FunctionCallOutput, ResponseInputParam +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentFileSearchCodeInterpreterFunction(TestBase): """Tests for agents using File Search + Code Interpreter + Function Tool.""" diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_multitool_with_conversations.py b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_multitool_with_conversations.py index 607acbb678ad..51b1d9426348 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_multitool_with_conversations.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_multitool_with_conversations.py @@ -3,6 +3,8 @@ # Licensed under the MIT License. # ------------------------------------ +import pytest + """ Test agents using multiple tools within conversations. @@ -22,6 +24,9 @@ from openai.types.responses.response_input_param import FunctionCallOutput, ResponseInputParam +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestMultiToolWithConversations(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search.py index 0668d960b8ad..d97bc133306d 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search.py @@ -20,6 +20,9 @@ # https://arxiv.org/pdf/2508.03680 +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentAISearch(TestBase): # Test questions with expected answers diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search_async.py index a6f25b30a756..92fda91873c0 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search_async.py @@ -21,6 +21,9 @@ # https://arxiv.org/pdf/2508.03680 +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentAISearchAsync(TestBase): # Test questions with expected answers diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_bing_grounding.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_bing_grounding.py index 9881a1a4a6d0..9cf4076a9be9 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_bing_grounding.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_bing_grounding.py @@ -16,6 +16,9 @@ ) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentBingGrounding(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter.py index 13337934bfd4..696ac9f4c373 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter.py @@ -16,6 +16,9 @@ ) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentCodeInterpreter(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter_async.py index 6932f282830b..a4ca2f8837af 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter_async.py @@ -5,6 +5,7 @@ # ------------------------------------ # cSpell:disable +import pytest from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils import RecordedTransport @@ -15,6 +16,9 @@ ) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentCodeInterpreterAsync(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search.py index d3b56c75e710..59c074f565a4 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search.py @@ -13,6 +13,9 @@ from azure.ai.projects.models import PromptAgentDefinition, FileSearchTool +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentFileSearch(TestBase): # To only run this test: diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_async.py index e249222339f7..ae30e7e037a3 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_async.py @@ -6,6 +6,7 @@ # cSpell:disable import os +import pytest from io import BytesIO from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -13,6 +14,9 @@ from azure.ai.projects.models import PromptAgentDefinition, FileSearchTool +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentFileSearchAsync(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_stream.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_stream.py index 6ebbf719a9a4..c0ed973ef2c2 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_stream.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_stream.py @@ -6,11 +6,15 @@ # cSpell:disable import os +import pytest from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy, RecordedTransport from azure.ai.projects.models import PromptAgentDefinition, FileSearchTool +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentFileSearchStream(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_stream_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_stream_async.py index afa42b523b70..e668ef5474f9 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_stream_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_file_search_stream_async.py @@ -6,12 +6,16 @@ # cSpell:disable import os +import pytest from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils import RecordedTransport from azure.ai.projects.models import PromptAgentDefinition, FileSearchTool +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentFileSearchStreamAsync(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_function_tool.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_function_tool.py index a4a4ced9ee1f..8b215d7560fe 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_function_tool.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_function_tool.py @@ -6,12 +6,16 @@ # cSpell:disable import json +import pytest from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy, RecordedTransport from azure.ai.projects.models import PromptAgentDefinition, FunctionTool from openai.types.responses.response_input_param import FunctionCallOutput, ResponseInputParam +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentFunctionTool(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_function_tool_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_function_tool_async.py index 5102005b2656..eb0600fc4c4a 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_function_tool_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_function_tool_async.py @@ -6,6 +6,7 @@ # cSpell:disable import json +import pytest from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils import RecordedTransport @@ -13,6 +14,9 @@ from openai.types.responses.response_input_param import FunctionCallOutput, ResponseInputParam +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentFunctionToolAsync(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_image_generation.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_image_generation.py index 6d5ae36d069e..d34c6e497238 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_image_generation.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_image_generation.py @@ -14,6 +14,9 @@ from azure.core.exceptions import ResourceNotFoundError +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentImageGeneration(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_image_generation_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_image_generation_async.py index ab31ff0cc827..16529f228f65 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_image_generation_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_image_generation_async.py @@ -15,6 +15,9 @@ from azure.core.exceptions import ResourceNotFoundError +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentImageGenerationAsync(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_mcp.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_mcp.py index d202c31aa34e..bb3d9a306907 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_mcp.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_mcp.py @@ -13,6 +13,9 @@ from openai.types.responses.response_input_param import McpApprovalResponse, ResponseInputParam +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentMCP(TestBase): # To run only this test: diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_mcp_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_mcp_async.py index adc9bbde3419..8ac7732e4978 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_mcp_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_mcp_async.py @@ -5,6 +5,7 @@ # ------------------------------------ # cSpell:disable +import pytest from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils import RecordedTransport @@ -12,6 +13,9 @@ from openai.types.responses.response_input_param import McpApprovalResponse, ResponseInputParam +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentMCPAsync(TestBase): # To run only this test: diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search.py index bef4555d04fb..dce1f809d4e1 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search.py @@ -5,12 +5,14 @@ # ------------------------------------ # cSpell:disable +import pytest import time from typing import Final from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy, RecordedTransport, is_live, is_live_and_not_recording from azure.core.exceptions import ResourceNotFoundError from azure.ai.projects.models import ( + FoundryFeaturesOptInKeys, MemoryStoreDefaultDefinition, MemorySearchPreviewTool, PromptAgentDefinition, @@ -18,6 +20,9 @@ ) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentMemorySearch(TestBase): @servicePreparer() @@ -81,7 +86,9 @@ def test_agent_memory_search(self, **kwargs): # in live mode so we don't get logs of this call in test recordings. if is_live_and_not_recording(): try: - project_client.memory_stores.delete(memory_store_name) + project_client.memory_stores.delete( + memory_store_name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -97,6 +104,7 @@ def test_agent_memory_search(self, **kwargs): name=memory_store_name, description="Test memory store for agent conversations", definition=definition, + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"\nMemory store created: {memory_store.name} (id: {memory_store.id})") assert memory_store.name == memory_store_name @@ -206,7 +214,9 @@ def test_agent_memory_search(self, **kwargs): if memory_store: try: - project_client.memory_stores.delete(memory_store.name) + project_client.memory_stores.delete( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print("Memory store deleted") except Exception as e: print(f"Failed to delete memory store: {e}") diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search_async.py index 4b9e64c8ce02..fd9e06519f15 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search_async.py @@ -6,12 +6,14 @@ # cSpell:disable import asyncio +import pytest from typing import Final from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils import RecordedTransport, is_live, is_live_and_not_recording from azure.core.exceptions import ResourceNotFoundError from azure.ai.projects.models import ( + FoundryFeaturesOptInKeys, MemoryStoreDefaultDefinition, MemorySearchPreviewTool, PromptAgentDefinition, @@ -19,6 +21,9 @@ ) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentMemorySearchAsync(TestBase): @servicePreparer() @@ -56,7 +61,9 @@ async def test_agent_memory_search_async(self, **kwargs): # in live mode so we don't get logs of this call in test recordings. if is_live_and_not_recording(): try: - await project_client.memory_stores.delete(memory_store_name) + await project_client.memory_stores.delete( + memory_store_name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print(f"Memory store `{memory_store_name}` deleted") except ResourceNotFoundError: pass @@ -72,6 +79,7 @@ async def test_agent_memory_search_async(self, **kwargs): name=memory_store_name, description="Test memory store for agent conversations", definition=definition, + foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW, ) print(f"\nMemory store created: {memory_store.name} (id: {memory_store.id})") assert memory_store.name == memory_store_name @@ -181,7 +189,9 @@ async def test_agent_memory_search_async(self, **kwargs): if memory_store: try: - await project_client.memory_stores.delete(memory_store.name) + await project_client.memory_stores.delete( + memory_store.name, foundry_features=FoundryFeaturesOptInKeys.MEMORY_STORES_V1_PREVIEW + ) print("Memory store deleted") except Exception as e: print(f"Failed to delete memory store: {e}") diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi.py index 4a862ff31b5a..b9c3f1b289a1 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi.py @@ -19,6 +19,9 @@ ) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentOpenApi(TestBase): # To run this test: diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi_async.py index d56ea15d7c52..5934f77faa4b 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi_async.py @@ -20,6 +20,9 @@ ) +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentOpenApiAsync(TestBase): # To run this test: diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_tools_with_conversations.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_tools_with_conversations.py index 636e85370bb4..c15e0ced6dc1 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_tools_with_conversations.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_tools_with_conversations.py @@ -12,6 +12,7 @@ """ import json +import pytest from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy, RecordedTransport from azure.ai.projects.models import ( @@ -24,6 +25,9 @@ from openai.types.responses.response_input_param import FunctionCallOutput, ResponseInputParam +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentToolsWithConversations(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_web_search.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_web_search.py index b93038521e1d..501da235ece3 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_web_search.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_web_search.py @@ -5,11 +5,15 @@ # ------------------------------------ # cSpell:disable +import pytest from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy, RecordedTransport from azure.ai.projects.models import PromptAgentDefinition, WebSearchPreviewTool, ApproximateLocation +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentWebSearch(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_web_search_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_web_search_async.py index 5c7c0ff5b46b..99b0bdbfda1b 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_web_search_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_web_search_async.py @@ -5,12 +5,16 @@ # ------------------------------------ # cSpell:disable +import pytest from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils import RecordedTransport from azure.ai.projects.models import PromptAgentDefinition, WebSearchPreviewTool, ApproximateLocation +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestAgentWebSearchAsync(TestBase): @servicePreparer() diff --git a/sdk/ai/azure-ai-projects/tests/connections/test_connections.py b/sdk/ai/azure-ai-projects/tests/connections/test_connections.py index 968fe4d7a503..f916f2d1b94f 100644 --- a/sdk/ai/azure-ai-projects/tests/connections/test_connections.py +++ b/sdk/ai/azure-ai-projects/tests/connections/test_connections.py @@ -3,10 +3,14 @@ # Licensed under the MIT License. # ------------------------------------ +import pytest from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestConnections(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/connections/test_connections_async.py b/sdk/ai/azure-ai-projects/tests/connections/test_connections_async.py index b98cdacad711..12fce861fb5d 100644 --- a/sdk/ai/azure-ai-projects/tests/connections/test_connections_async.py +++ b/sdk/ai/azure-ai-projects/tests/connections/test_connections_async.py @@ -3,11 +3,15 @@ # Licensed under the MIT License. # ------------------------------------ +import pytest from azure.ai.projects.aio import AIProjectClient from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestConnectionsAsync(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/datasets/test_datasets.py b/sdk/ai/azure-ai-projects/tests/datasets/test_datasets.py index ea816d596fc9..a64874ec090f 100644 --- a/sdk/ai/azure-ai-projects/tests/datasets/test_datasets.py +++ b/sdk/ai/azure-ai-projects/tests/datasets/test_datasets.py @@ -20,6 +20,9 @@ data_file2 = os.path.join(data_folder, "data_file2.txt") +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestDatasets(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/datasets/test_datasets_async.py b/sdk/ai/azure-ai-projects/tests/datasets/test_datasets_async.py index 6400df3c369d..3aafccd1f837 100644 --- a/sdk/ai/azure-ai-projects/tests/datasets/test_datasets_async.py +++ b/sdk/ai/azure-ai-projects/tests/datasets/test_datasets_async.py @@ -21,6 +21,9 @@ data_file2 = os.path.join(data_folder, "data_file2.txt") +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestDatasetsAsync(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/deployments/test_deployments.py b/sdk/ai/azure-ai-projects/tests/deployments/test_deployments.py index 8bc90ee53a4c..549e91e220b9 100644 --- a/sdk/ai/azure-ai-projects/tests/deployments/test_deployments.py +++ b/sdk/ai/azure-ai-projects/tests/deployments/test_deployments.py @@ -3,11 +3,15 @@ # Licensed under the MIT License. # ------------------------------------ +import pytest from azure.ai.projects import AIProjectClient from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestDeployments(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/deployments/test_deployments_async.py b/sdk/ai/azure-ai-projects/tests/deployments/test_deployments_async.py index 92549800faa2..54bea9b8d3cf 100644 --- a/sdk/ai/azure-ai-projects/tests/deployments/test_deployments_async.py +++ b/sdk/ai/azure-ai-projects/tests/deployments/test_deployments_async.py @@ -3,11 +3,15 @@ # Licensed under the MIT License. # ------------------------------------ +import pytest from azure.ai.projects.aio import AIProjectClient from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestDeploymentsAsync(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/files/test_files.py b/sdk/ai/azure-ai-projects/tests/files/test_files.py index f934ce955547..27661719fa00 100644 --- a/sdk/ai/azure-ai-projects/tests/files/test_files.py +++ b/sdk/ai/azure-ai-projects/tests/files/test_files.py @@ -10,6 +10,9 @@ from devtools_testutils import recorded_by_proxy, RecordedTransport +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestFiles(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/files/test_files_async.py b/sdk/ai/azure-ai-projects/tests/files/test_files_async.py index cc85b778e1a5..f9fe2f7acb59 100644 --- a/sdk/ai/azure-ai-projects/tests/files/test_files_async.py +++ b/sdk/ai/azure-ai-projects/tests/files/test_files_async.py @@ -11,6 +11,9 @@ from devtools_testutils import RecordedTransport +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestFilesAsync(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py index 6a265f3ada48..d8e28452557f 100644 --- a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py +++ b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py @@ -60,6 +60,9 @@ def _wrapper(test_class, job_type, expected_method_type, **kwargs): return _wrapper +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestFineTuning(TestBase): def _create_sft_finetuning_job(self, openai_client, train_file_id, validation_file_id, training_type, model_type): diff --git a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py index 06838474a2c9..be0cc2de95dc 100644 --- a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py +++ b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py @@ -63,6 +63,9 @@ async def _wrapper(test_class, job_type, expected_method_type, **kwargs): return _wrapper +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestFineTuningAsync(TestBase): async def _create_sft_finetuning_job_async( diff --git a/sdk/ai/azure-ai-projects/tests/indexes/test_indexes.py b/sdk/ai/azure-ai-projects/tests/indexes/test_indexes.py index eb22ca9ff27b..1210c6142926 100644 --- a/sdk/ai/azure-ai-projects/tests/indexes/test_indexes.py +++ b/sdk/ai/azure-ai-projects/tests/indexes/test_indexes.py @@ -4,12 +4,16 @@ # Licensed under the MIT License. # ------------------------------------ +import pytest from azure.ai.projects import AIProjectClient from azure.ai.projects.models import AzureAISearchIndex, IndexType from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestIndexes(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/indexes/test_indexes_async.py b/sdk/ai/azure-ai-projects/tests/indexes/test_indexes_async.py index eb88e0d9a915..70b7b73fb3b3 100644 --- a/sdk/ai/azure-ai-projects/tests/indexes/test_indexes_async.py +++ b/sdk/ai/azure-ai-projects/tests/indexes/test_indexes_async.py @@ -4,12 +4,16 @@ # Licensed under the MIT License. # ------------------------------------ +import pytest from azure.ai.projects.aio import AIProjectClient from azure.ai.projects.models import AzureAISearchIndex, IndexType from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestIndexesAsync(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/redteams/test_redteams.py b/sdk/ai/azure-ai-projects/tests/redteams/test_redteams.py index 2aefc6cef437..eca364b4997c 100644 --- a/sdk/ai/azure-ai-projects/tests/redteams/test_redteams.py +++ b/sdk/ai/azure-ai-projects/tests/redteams/test_redteams.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. # ------------------------------------ +import pytest from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( RedTeam, @@ -14,6 +15,9 @@ from devtools_testutils import recorded_by_proxy +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestRedTeams(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/redteams/test_redteams_async.py b/sdk/ai/azure-ai-projects/tests/redteams/test_redteams_async.py index d6809d2d785b..87b1ee306f4f 100644 --- a/sdk/ai/azure-ai-projects/tests/redteams/test_redteams_async.py +++ b/sdk/ai/azure-ai-projects/tests/redteams/test_redteams_async.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. # ------------------------------------ +import pytest from azure.ai.projects.aio import AIProjectClient from azure.ai.projects.models import ( RedTeam, @@ -14,6 +15,9 @@ from devtools_testutils.aio import recorded_by_proxy_async +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestRedTeams(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/responses/test_responses.py b/sdk/ai/azure-ai-projects/tests/responses/test_responses.py index a41d3d7505d6..c8129bedc624 100644 --- a/sdk/ai/azure-ai-projects/tests/responses/test_responses.py +++ b/sdk/ai/azure-ai-projects/tests/responses/test_responses.py @@ -45,6 +45,9 @@ class TestResponses(TestBase): # To run this test: # pytest tests\responses\test_responses.py::TestResponses::test_responses -s + @pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" + ) @servicePreparer() @recorded_by_proxy(RecordedTransport.HTTPX) def test_responses(self, **kwargs): diff --git a/sdk/ai/azure-ai-projects/tests/responses/test_responses_async.py b/sdk/ai/azure-ai-projects/tests/responses/test_responses_async.py index bf7252962dad..2bfbaa92926b 100644 --- a/sdk/ai/azure-ai-projects/tests/responses/test_responses_async.py +++ b/sdk/ai/azure-ai-projects/tests/responses/test_responses_async.py @@ -41,6 +41,9 @@ class TestResponsesAsync(TestBase): # To run this test: # pytest tests\responses\test_responses_async.py::TestResponsesAsync::test_responses_async -s + @pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" + ) @servicePreparer() @recorded_by_proxy_async(RecordedTransport.HTTPX) async def test_responses_async(self, **kwargs): diff --git a/sdk/ai/azure-ai-projects/tests/responses/test_responses_with_http_client_override.py b/sdk/ai/azure-ai-projects/tests/responses/test_responses_with_http_client_override.py new file mode 100644 index 000000000000..46da53d9994f --- /dev/null +++ b/sdk/ai/azure-ai-projects/tests/responses/test_responses_with_http_client_override.py @@ -0,0 +1,104 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +""" +Tests to verify that a custom http_client can be passed to get_openai_client() +and that the returned OpenAI client uses it instead of the default one. +""" + +import os +import pytest +import httpx +from typing import Any +from azure.core.credentials import TokenCredential +from azure.ai.projects import AIProjectClient + + +class DummyTokenCredential(TokenCredential): + """A dummy credential that returns None for testing purposes.""" + + def get_token(self, *scopes: str, **kwargs: Any): # type: ignore[override] + return None + + +@pytest.fixture(autouse=True) +def patch_openai(monkeypatch): + """Ensure no real network/token calls are made during the test.""" + monkeypatch.setattr("azure.ai.projects._patch.get_bearer_token_provider", lambda *_, **__: "token-provider") + + +@pytest.mark.skipif( + os.environ.get("AZURE_AI_PROJECTS_CONSOLE_LOGGING", "false").lower() == "true", + reason="Test skipped because AZURE_AI_PROJECTS_CONSOLE_LOGGING is set to 'true'", +) +class TestResponsesWithHttpClientOverride: + """Tests for custom http_client override in get_openai_client().""" + + def test_custom_http_client_is_used(self): + """ + Test that a custom http_client passed to get_openai_client() is actually used + by the returned OpenAI client when making API calls. + """ + # Track whether our custom http_client was invoked + request_intercepted = {"called": False, "request": None} + + class TrackingTransport(httpx.BaseTransport): + """Custom transport that tracks requests and returns mock responses.""" + + def handle_request(self, request: httpx.Request) -> httpx.Response: + # Mark that our custom transport was called + request_intercepted["called"] = True + request_intercepted["request"] = request + + # Return a mock response for the OpenAI responses.create() call + return httpx.Response( + 200, + request=request, + json={ + "id": "resp_test_123", + "output": [ + { + "type": "message", + "id": "msg_test_123", + "role": "assistant", + "content": [ + { + "type": "output_text", + "text": "This is a test response from the mock.", + } + ], + } + ], + }, + ) + + # Create a custom http_client with our tracking transport + custom_http_client = httpx.Client(transport=TrackingTransport()) + + # Create the AIProjectClient + project_client = AIProjectClient( + endpoint="https://example.com/api/projects/test", + credential=DummyTokenCredential(), + ) + + # Get an OpenAI client with our custom http_client + openai_client = project_client.get_openai_client(http_client=custom_http_client) + + # Make an API call + response = openai_client.responses.create( + model="gpt-4o", + input="Test input", + ) + + # Verify the custom http_client was used + assert request_intercepted["called"], "Custom http_client was not used for the request" + assert request_intercepted["request"] is not None, "Request was not captured" + + # Verify the request was made to the expected endpoint + assert "/openai/v1/responses" in str(request_intercepted["request"].url) + + # Verify we got a valid response + assert response.id == "resp_test_123" + assert response.output_text == "This is a test response from the mock." diff --git a/sdk/ai/azure-ai-projects/tests/responses/test_responses_with_http_client_override_async.py b/sdk/ai/azure-ai-projects/tests/responses/test_responses_with_http_client_override_async.py new file mode 100644 index 000000000000..ea6ab052613b --- /dev/null +++ b/sdk/ai/azure-ai-projects/tests/responses/test_responses_with_http_client_override_async.py @@ -0,0 +1,108 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +""" +Tests to verify that a custom http_client can be passed to get_openai_client() +and that the returned AsyncOpenAI client uses it instead of the default one. +""" + +import os +import pytest +import httpx +from typing import Any +from azure.core.credentials_async import AsyncTokenCredential +from azure.ai.projects.aio import AIProjectClient + + +class DummyAsyncTokenCredential(AsyncTokenCredential): + """A dummy async credential that returns None for testing purposes.""" + + async def get_token(self, *scopes: str, **kwargs: Any): # type: ignore[override] + return None + + async def close(self) -> None: + pass + + +@pytest.fixture(autouse=True) +def patch_openai(monkeypatch): + """Ensure no real network/token calls are made during the test.""" + monkeypatch.setattr("azure.ai.projects.aio._patch.get_bearer_token_provider", lambda *_, **__: "token-provider") + + +@pytest.mark.skipif( + os.environ.get("AZURE_AI_PROJECTS_CONSOLE_LOGGING", "false").lower() == "true", + reason="Test skipped because AZURE_AI_PROJECTS_CONSOLE_LOGGING is set to 'true'", +) +class TestResponsesWithHttpClientOverrideAsync: + """Tests for custom http_client override in async get_openai_client().""" + + @pytest.mark.asyncio + async def test_custom_http_client_is_used(self): + """ + Test that a custom http_client passed to get_openai_client() is actually used + by the returned AsyncOpenAI client when making API calls. + """ + # Track whether our custom http_client was invoked + request_intercepted = {"called": False, "request": None} + + class TrackingTransport(httpx.AsyncBaseTransport): + """Custom async transport that tracks requests and returns mock responses.""" + + async def handle_async_request(self, request: httpx.Request) -> httpx.Response: + # Mark that our custom transport was called + request_intercepted["called"] = True + request_intercepted["request"] = request + + # Return a mock response for the OpenAI responses.create() call + return httpx.Response( + 200, + request=request, + json={ + "id": "resp_test_123", + "output": [ + { + "type": "message", + "id": "msg_test_123", + "role": "assistant", + "content": [ + { + "type": "output_text", + "text": "This is a test response from the mock.", + } + ], + } + ], + }, + ) + + # Create a custom http_client with our tracking transport + custom_http_client = httpx.AsyncClient(transport=TrackingTransport()) + + # Create the AIProjectClient + project_client = AIProjectClient( + endpoint="https://example.com/api/projects/test", + credential=DummyAsyncTokenCredential(), + ) + + # Get an AsyncOpenAI client with our custom http_client + openai_client = project_client.get_openai_client(http_client=custom_http_client) + + # Make an API call + response = await openai_client.responses.create( + model="gpt-4o", + input="Test input", + ) + + # Verify the custom http_client was used + assert request_intercepted["called"], "Custom http_client was not used for the request" + assert request_intercepted["request"] is not None, "Request was not captured" + + # Verify the request was made to the expected endpoint + assert "/openai/v1/responses" in str(request_intercepted["request"].url) + + # Verify we got a valid response + assert response.id == "resp_test_123" + assert response.output_text == "This is a test response from the mock." diff --git a/sdk/ai/azure-ai-projects/tests/samples/test_samples.py b/sdk/ai/azure-ai-projects/tests/samples/test_samples.py index 999b884f865e..52fc5ae46a37 100644 --- a/sdk/ai/azure-ai-projects/tests/samples/test_samples.py +++ b/sdk/ai/azure-ai-projects/tests/samples/test_samples.py @@ -16,6 +16,9 @@ from test_samples_helpers import agent_tools_instructions, get_sample_environment_variables_map +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestSamples(AzureRecordedTestCase): # To run this test with a specific sample, use: @@ -41,6 +44,9 @@ class TestSamples(AzureRecordedTestCase): "sample_agent_mcp_with_project_connection.py", "sample_agent_openapi_with_project_connection.py", "sample_agent_to_agent.py", + "sample_agent_web_search.py", + "sample_agent_web_search_preview.py", + "sample_agent_web_search_with_custom_search.py", ], ), ) @@ -53,4 +59,5 @@ def test_agent_tools_samples(self, sample_path: str, **kwargs) -> None: executor.validate_print_calls_by_llm( instructions=agent_tools_instructions, project_endpoint=kwargs["azure_ai_project_endpoint"], + model=kwargs["azure_ai_model_deployment_name"], ) diff --git a/sdk/ai/azure-ai-projects/tests/samples/test_samples_async.py b/sdk/ai/azure-ai-projects/tests/samples/test_samples_async.py index d836afd2351e..69b5b2cbd45b 100644 --- a/sdk/ai/azure-ai-projects/tests/samples/test_samples_async.py +++ b/sdk/ai/azure-ai-projects/tests/samples/test_samples_async.py @@ -15,6 +15,9 @@ from test_samples_helpers import agent_tools_instructions, get_sample_environment_variables_map +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestSamplesAsync(AzureRecordedTestCase): """Async test cases for samples.""" @@ -25,7 +28,7 @@ class TestSamplesAsync(AzureRecordedTestCase): "sample_path", get_async_sample_paths( "agents/tools", - samples_to_skip=["sample_agent_mcp_with_project_connection_async.py"], + samples_to_skip=[], ), ) @SamplePathPasser() @@ -42,4 +45,5 @@ async def test_agent_tools_samples_async(self, sample_path: str, **kwargs) -> No await executor.validate_print_calls_by_llm_async( instructions=agent_tools_instructions, project_endpoint=kwargs["azure_ai_project_endpoint"], + model=kwargs["azure_ai_model_deployment_name"], ) diff --git a/sdk/ai/azure-ai-projects/tests/samples/test_samples_evaluations.py b/sdk/ai/azure-ai-projects/tests/samples/test_samples_evaluations.py index d3370ffebb19..09dcdd61e73c 100644 --- a/sdk/ai/azure-ai-projects/tests/samples/test_samples_evaluations.py +++ b/sdk/ai/azure-ai-projects/tests/samples/test_samples_evaluations.py @@ -41,6 +41,9 @@ Always respond with `reason` indicating the reason for the response.""" +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestSamplesEvaluations(AzureRecordedTestCase): """ Tests for evaluation samples. diff --git a/sdk/ai/azure-ai-projects/tests/telemetry/test_telemetry.py b/sdk/ai/azure-ai-projects/tests/telemetry/test_telemetry.py index 9ade1692ae0a..6e74aafa4016 100644 --- a/sdk/ai/azure-ai-projects/tests/telemetry/test_telemetry.py +++ b/sdk/ai/azure-ai-projects/tests/telemetry/test_telemetry.py @@ -9,6 +9,9 @@ from devtools_testutils import recorded_by_proxy, is_live +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestTelemetry(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/telemetry/test_telemetry_async.py b/sdk/ai/azure-ai-projects/tests/telemetry/test_telemetry_async.py index d0aee2d61e4b..1273bf410413 100644 --- a/sdk/ai/azure-ai-projects/tests/telemetry/test_telemetry_async.py +++ b/sdk/ai/azure-ai-projects/tests/telemetry/test_telemetry_async.py @@ -10,6 +10,9 @@ from devtools_testutils import is_live +@pytest.mark.skip( + reason="Skipped until re-enabled and recorded on Foundry endpoint that supports the new versioning schema" +) class TestTelemetryAsync(TestBase): # To run this test, use the following command in the \sdk\ai\azure-ai-projects folder: diff --git a/sdk/ai/azure-ai-projects/tests/test_base.py b/sdk/ai/azure-ai-projects/tests/test_base.py index b1b7e0e1d668..fe6b6fbcb0aa 100644 --- a/sdk/ai/azure-ai-projects/tests/test_base.py +++ b/sdk/ai/azure-ai-projects/tests/test_base.py @@ -25,7 +25,6 @@ Index, IndexType, InputContentType, - ItemResource, InputItemType, ModelDeployment, ) diff --git a/sdk/ai/azure-ai-projects/tsp-location.yaml b/sdk/ai/azure-ai-projects/tsp-location.yaml new file mode 100644 index 000000000000..9f9400d68c59 --- /dev/null +++ b/sdk/ai/azure-ai-projects/tsp-location.yaml @@ -0,0 +1,4 @@ +directory: specification/ai-foundry/data-plane/Foundry +commit: ac1aa168fb5e530b01ef31c525dcb0848aeb6fbc +repo: Azure/azure-rest-api-specs +additionalDirectories: