From cd0c48aefa7979d476879067fde16a7b3817891c Mon Sep 17 00:00:00 2001 From: OpeOginni Date: Fri, 20 Feb 2026 23:51:57 +0100 Subject: [PATCH 1/3] feat(tui): add Azure provider configuration instructions to login command --- packages/opencode/src/cli/cmd/auth.ts | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/packages/opencode/src/cli/cmd/auth.ts b/packages/opencode/src/cli/cmd/auth.ts index e050a0abf803..8a273e7f8f98 100644 --- a/packages/opencode/src/cli/cmd/auth.ts +++ b/packages/opencode/src/cli/cmd/auth.ts @@ -388,6 +388,24 @@ export const AuthLoginCommand = cmd({ ) } + if (provider === "azure") { + prompts.log.info( + "Azure OpenAI requires AZURE_RESOURCE_NAME (your Azure resource name).\n" + + "Example: AZURE_RESOURCE_NAME=XXX opencode\n" + + "Or add to your shell profile: export AZURE_RESOURCE_NAME=XXX\n" + + "Docs: https://opencode.ai/docs/providers/#azure-openai", + ) + } + + if (provider === "azure-cognitive-services") { + prompts.log.info( + "Azure Cognitive Services requires AZURE_COGNITIVE_SERVICES_RESOURCE_NAME (your Azure resource name).\n" + + "Example: AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX opencode\n" + + "Or add to your shell profile: export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX\n" + + "Docs: https://opencode.ai/docs/providers/#azure-cognitive-services", + ) + } + if (provider === "opencode") { prompts.log.info("Create an api key at https://opencode.ai/auth") } From 7f692b1748826e477b94212e3e7f7e4c60e49e93 Mon Sep 17 00:00:00 2001 From: OpeOginni Date: Sat, 21 Feb 2026 02:23:50 +0100 Subject: [PATCH 2/3] feat(docs): update tip to add opencode.json setup --- packages/opencode/src/cli/cmd/auth.ts | 2 ++ packages/web/src/content/docs/providers.mdx | 30 +++++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/packages/opencode/src/cli/cmd/auth.ts b/packages/opencode/src/cli/cmd/auth.ts index 8a273e7f8f98..c43915c05c6d 100644 --- a/packages/opencode/src/cli/cmd/auth.ts +++ 
b/packages/opencode/src/cli/cmd/auth.ts @@ -393,6 +393,7 @@ export const AuthLoginCommand = cmd({ "Azure OpenAI requires AZURE_RESOURCE_NAME (your Azure resource name).\n" + "Example: AZURE_RESOURCE_NAME=XXX opencode\n" + "Or add to your shell profile: export AZURE_RESOURCE_NAME=XXX\n" + + "You can also set provider.azure.options.resourceName in opencode.json\n" + "Docs: https://opencode.ai/docs/providers/#azure-openai", ) } @@ -402,6 +403,7 @@ export const AuthLoginCommand = cmd({ "Azure Cognitive Services requires AZURE_COGNITIVE_SERVICES_RESOURCE_NAME (your Azure resource name).\n" + "Example: AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX opencode\n" + "Or add to your shell profile: export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX\n" + + "You can also set provider.azure-cognitive-services.options.resourceName in opencode.json\n" + "Docs: https://opencode.ai/docs/providers/#azure-cognitive-services", ) } diff --git a/packages/web/src/content/docs/providers.mdx b/packages/web/src/content/docs/providers.mdx index db3bfeaeebeb..132126f4e546 100644 --- a/packages/web/src/content/docs/providers.mdx +++ b/packages/web/src/content/docs/providers.mdx @@ -342,6 +342,21 @@ If you encounter "I'm sorry, but I cannot assist with that request" errors, try export AZURE_RESOURCE_NAME=XXX ``` + Or set it in your `opencode.json` config: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. Run the `/models` command to select your deployed model. ```txt @@ -389,6 +404,21 @@ If you encounter "I'm sorry, but I cannot assist with that request" errors, try export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` + Or set it in your `opencode.json` config: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. 
Run the `/models` command to select your deployed model. ```txt From e9802ca3f5928d2cf09672c2dd44133ec0d28c16 Mon Sep 17 00:00:00 2001 From: OpeOginni Date: Sat, 21 Feb 2026 14:43:09 +0100 Subject: [PATCH 3/3] feat(docs): enhance Azure provider instructions for non-OpenAI models and error handling --- .../web/src/content/docs/ar/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/bs/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/da/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/de/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/es/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/fr/providers.mdx | 40 +++++++++++++- .../web/src/content/docs/it/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/ja/providers.mdx | 55 ++++++++++++++++++- .../web/src/content/docs/ko/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/nb/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/pl/providers.mdx | 51 +++++++++++++++++ packages/web/src/content/docs/providers.mdx | 49 ++++++++++++----- .../web/src/content/docs/pt-br/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/ru/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/th/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/tr/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/zh-cn/providers.mdx | 51 +++++++++++++++++ .../web/src/content/docs/zh-tw/providers.mdx | 51 +++++++++++++++++ 18 files changed, 891 insertions(+), 18 deletions(-) diff --git a/packages/web/src/content/docs/ar/providers.mdx b/packages/web/src/content/docs/ar/providers.mdx index 1d448986f31e..8f2516e6a546 100644 --- a/packages/web/src/content/docs/ar/providers.mdx +++ b/packages/web/src/content/docs/ar/providers.mdx @@ -294,6 +294,8 @@ OpenCode Zen هي قائمة نماذج يوفّرها فريق OpenCode وقد إذا واجهت أخطاء "I'm sorry, but I cannot assist with that request"، فجرّب تغيير مرشح المحتوى من **DefaultV2** إلى **Default** في مورد 
Azure الخاص بك. ::: +إذا كنت تستخدم نماذج غير OpenAI أو ترى أخطاء، ففضل موفّر [Azure Cognitive Services](#azure-cognitive-services) مع `@ai-sdk/openai-compatible` و`options.baseURL`؛ إذا كنت تستخدم نماذج OpenAI بشكل أساسي، فهذا الموفّر جيد. + 1. توجّه إلى [Azure portal](https://portal.azure.com/) وأنشئ موردا من نوع **Azure OpenAI**. ستحتاج إلى: - **Resource name**: يصبح جزءا من نقطة نهاية API لديك (`https://RESOURCE_NAME.openai.azure.com/`) - **API key**: إما `KEY 1` أو `KEY 2` من موردك @@ -331,6 +333,21 @@ OpenCode Zen هي قائمة نماذج يوفّرها فريق OpenCode وقد export AZURE_RESOURCE_NAME=XXX ``` + أو قم بتجاوزها في ملف إعدادات `opencode.json`: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. شغّل الأمر `/models` لاختيار النموذج الذي قمت بنشره. ```txt @@ -378,6 +395,40 @@ OpenCode Zen هي قائمة نماذج يوفّرها فريق OpenCode وقد export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +استخدم إعداد التجاوز أدناه للنماذج غير OpenAI أو الأخطاء المتكررة. +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +يستمر استخدام مفتاح API من `/connect`. + +:::tip +هل تصل إلى حدود المعدل؟ قلل `models..limit.output` للبقاء تحت حدود Azure للرموز في الدقيقة. +::: + 6. شغّل الأمر `/models` لاختيار النموذج الذي قمت بنشره. 
```txt diff --git a/packages/web/src/content/docs/bs/providers.mdx b/packages/web/src/content/docs/bs/providers.mdx index 2415cda27785..e9c686785ff8 100644 --- a/packages/web/src/content/docs/bs/providers.mdx +++ b/packages/web/src/content/docs/bs/providers.mdx @@ -299,6 +299,8 @@ Ili ako već imate API ključ, možete odabrati **Ručno unesite API ključ** i Ako naiđete na greške "Žao mi je, ali ne mogu pomoći s tim zahtjevom", pokušajte promijeniti filter sadržaja iz **DefaultV2** u **Default** u vašem Azure resursu. ::: +Ako koristite modele koji nisu OpenAI ili vidite greške, preferirajte provajdera [Azure Cognitive Services](#azure-cognitive-services) sa `@ai-sdk/openai-compatible` i `options.baseURL`; ako uglavnom koristite OpenAI modele, ovaj provajder je u redu. + 1. Idite na [Azure portal](https://portal.azure.com/) i kreirajte **Azure OpenAI** resurs. trebat će vam: - **Naziv resursa**: Ovo postaje dio vaše krajnje tačke API-ja (`https://RESOURCE_NAME.openai.azure.com/`) - **API ključ**: Ili `KEY 1` ili `KEY 2` sa vašeg izvora @@ -336,6 +338,21 @@ Ili ga dodajte na svoj bash profil: export AZURE_RESOURCE_NAME=XXX ``` +Ili ga poništite u vašoj `opencode.json` konfiguraciji: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } +} +``` + 6. Pokrenite naredbu `/models` da odaberete svoj raspoređeni model. ```txt @@ -383,6 +400,40 @@ Ili ga dodajte na svoj bash profil: export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +Koristite konfiguraciju nadjačavanja u nastavku za modele koji nisu OpenAI ili greške koje se ponavljaju. 
+::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +`/connect` API ključ se i dalje koristi. + +:::tip +Dostižete ograničenja brzine? Smanjite `models..limit.output` da ostanete ispod Azure ograničenja tokena po minuti. +::: + 6. Pokrenite naredbu `/models` da odaberete svoj raspoređeni model. ```txt diff --git a/packages/web/src/content/docs/da/providers.mdx b/packages/web/src/content/docs/da/providers.mdx index 829ae46134bf..999a316c27f2 100644 --- a/packages/web/src/content/docs/da/providers.mdx +++ b/packages/web/src/content/docs/da/providers.mdx @@ -290,6 +290,8 @@ Eller hvis du allerede har en API-nøgle, kan du vælge **Manually enter API Key Hvis du støder på "Beklager, men jeg kan ikke hjælpe med den anmodning"-fejl, kan du prøve at ændre indholdsfilteret fra **DefaultV2** til **Default** i Azure-ressourcen. ::: +Hvis du bruger ikke-OpenAI-modeller eller ser fejl, foretræk [Azure Cognitive Services](#azure-cognitive-services) udbyderen med `@ai-sdk/openai-compatible` og `options.baseURL`; hvis du mest bruger OpenAI-modeller, er denne udbyder fin. + 1. Gå til [Azure-portalen](https://portal.azure.com/) og opret en **Azure OpenAI**-ressource. 
Du skal bruge: - **Ressourcenavn**: Dette bliver en del af API-endpointet (`https://RESOURCE_NAME.openai.azure.com/`) - **API-nøgle**: Enten `KEY 1` eller `KEY 2` fra din ressource @@ -327,6 +329,21 @@ Hvis du støder på "Beklager, men jeg kan ikke hjælpe med den anmodning"-fejl, export AZURE_RESOURCE_NAME=XXX ``` + Eller tilsidesæt det i din `opencode.json`-konfiguration: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. Kør kommandoen `/models` for at vælge den distribuerede model. ```txt @@ -374,6 +391,40 @@ Hvis du støder på "Beklager, men jeg kan ikke hjælpe med den anmodning"-fejl, export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +Brug tilsidesættelseskonfigurationen nedenfor til ikke-OpenAI-modeller eller tilbagevendende fejl. +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +`/connect` API-nøglen bruges stadig. + +:::tip +Rammer du hastighedsbegrænsninger? Reducer `models..limit.output` for at blive under Azure token-per-minut grænser. +::: + 6. Kør kommandoen `/models` for at vælge den distribuerede model. 
```txt diff --git a/packages/web/src/content/docs/de/providers.mdx b/packages/web/src/content/docs/de/providers.mdx index d72ac5af3de4..7ab02badd022 100644 --- a/packages/web/src/content/docs/de/providers.mdx +++ b/packages/web/src/content/docs/de/providers.mdx @@ -296,6 +296,8 @@ Wenn Sie bereits über einen API-Schlüssel verfügen, können Sie **API-Schlüs Wenn Sie auf die Fehlermeldung „Es tut mir leid, aber ich kann Ihnen bei dieser Anfrage nicht weiterhelfen“ stoßen, versuchen Sie, den Inhaltsfilter in Ihrer Azure-Ressource von **DefaultV2** in **Default** zu ändern. ::: +Wenn Sie Nicht-OpenAI-Modelle verwenden oder Fehler sehen, bevorzugen Sie den Anbieter [Azure Cognitive Services](#azure-cognitive-services) mit `@ai-sdk/openai-compatible` und `options.baseURL`; wenn Sie hauptsächlich OpenAI-Modelle verwenden, ist dieser Anbieter in Ordnung. + 1. Gehen Sie zu [Azure portal](https://portal.azure.com/) und erstellen Sie eine **Azure OpenAI**-Ressource. Sie benötigen: - **Ressourcenname**: Dies wird Teil Ihres API-Endpunkts (`https://RESOURCE_NAME.openai.azure.com/`) - **API-Schlüssel**: Entweder `KEY 1` oder `KEY 2` aus Ihrer Ressource @@ -333,6 +335,21 @@ Wenn Sie auf die Fehlermeldung „Es tut mir leid, aber ich kann Ihnen bei diese export AZURE_RESOURCE_NAME=XXX ``` + Oder überschreiben Sie es in Ihrer `opencode.json`-Konfiguration: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. Führen Sie den Befehl `/models` aus, um Ihr bereitgestelltes Modell auszuwählen. ```txt @@ -380,6 +397,40 @@ Wenn Sie auf die Fehlermeldung „Es tut mir leid, aber ich kann Ihnen bei diese export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +Verwenden Sie die untenstehende Überschreibungskonfiguration für Nicht-OpenAI-Modelle oder wiederkehrende Fehler. 
+::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +Der `/connect` API-Schlüssel wird weiterhin verwendet. + +:::tip +Erreichen Sie Ratenlimits? Reduzieren Sie `models..limit.output`, um unter den Azure-Token-pro-Minute-Obergrenzen zu bleiben. +::: + 6. Führen Sie den Befehl `/models` aus, um Ihr bereitgestelltes Modell auszuwählen. ```txt diff --git a/packages/web/src/content/docs/es/providers.mdx b/packages/web/src/content/docs/es/providers.mdx index 8d86612538bf..852360dbd4cb 100644 --- a/packages/web/src/content/docs/es/providers.mdx +++ b/packages/web/src/content/docs/es/providers.mdx @@ -296,6 +296,8 @@ O si ya tienes una clave API, puedes seleccionar **Ingresar manualmente la clave Si encuentra errores del tipo "Lo siento, pero no puedo ayudar con esa solicitud", intente cambiar el filtro de contenido de **DefaultV2** a **Default** en su recurso de Azure. ::: +Si usa modelos que no son de OpenAI o ve errores, prefiera el proveedor [Azure Cognitive Services](#azure-cognitive-services) con `@ai-sdk/openai-compatible` y `options.baseURL`; si usa principalmente modelos de OpenAI, este proveedor está bien. + 1. Diríjase al [portal de Azure](https://portal.azure.com/) y cree un recurso **Azure OpenAI**. 
Necesitarás: - **Nombre del recurso**: esto pasa a formar parte de su punto final API (`https://RESOURCE_NAME.openai.azure.com/`) - **Clave API**: `KEY 1` o `KEY 2` de su recurso @@ -333,6 +335,21 @@ Si encuentra errores del tipo "Lo siento, pero no puedo ayudar con esa solicitud export AZURE_RESOURCE_NAME=XXX ``` + O anúlelo en su configuración de `opencode.json`: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. Ejecute el comando `/models` para seleccionar su modelo implementado. ```txt @@ -380,6 +397,40 @@ Si encuentra errores del tipo "Lo siento, pero no puedo ayudar con esa solicitud export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +Utilice la configuración de anulación a continuación para modelos que no son de OpenAI o errores recurrentes. +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +Se sigue utilizando la clave API de `/connect`. + +:::tip +¿Alcanza los límites de velocidad? Reduzca `models..limit.output` para mantenerse por debajo de los límites de tokens por minuto de Azure. +::: + 6. Ejecute el comando `/models` para seleccionar su modelo implementado. 
```txt diff --git a/packages/web/src/content/docs/fr/providers.mdx b/packages/web/src/content/docs/fr/providers.mdx index b65e9c00a154..9be09329065a 100644 --- a/packages/web/src/content/docs/fr/providers.mdx +++ b/packages/web/src/content/docs/fr/providers.mdx @@ -299,6 +299,8 @@ Ou si vous disposez déjà d'une clé API, vous pouvez sélectionner **Entrer ma Si vous rencontrez des erreurs « Je suis désolé, mais je ne peux pas vous aider avec cette demande », essayez de modifier le filtre de contenu de **DefaultV2** à **Default** dans votre ressource Azure. ::: +Si vous utilisez des modèles non-OpenAI ou rencontrez des erreurs, préférez le fournisseur [Azure Cognitive Services](#azure-cognitive-services) avec `@ai-sdk/openai-compatible` et `options.baseURL` ; si vous utilisez principalement des modèles OpenAI, ce fournisseur convient très bien. + 1. Rendez-vous sur le [portail Azure](https://portal.azure.com/) et créez une ressource **Azure OpenAI**. Vous aurez besoin de : - **Nom de la ressource** : cela fait partie de votre point de terminaison API (`https://RESOURCE_NAME.openai.azure.com/`) - **Clé API** : soit `KEY 1` ou `KEY 2` de votre ressource @@ -377,12 +379,46 @@ Ou ajoutez-le à votre profil bash : AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX opencode ``` -Ou ajoutez-le à votre profil bash : + Ou ajoutez-le à votre profil bash : -```bash title="~/.bash_profile" + ```bash title="~/.bash_profile" export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX + ``` + +:::note +Utilisez la configuration de remplacement ci-dessous pour les modèles non-OpenAI ou les erreurs récurrentes. 
+::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} ``` +La clé API `/connect` est toujours utilisée. + +:::tip +Vous atteignez les limites de débit ? Réduisez `models..limit.output` pour rester sous les limites de jetons par minute d'Azure. +::: + 6. Exécutez la commande `/models` pour sélectionner votre modèle déployé. ```txt diff --git a/packages/web/src/content/docs/it/providers.mdx b/packages/web/src/content/docs/it/providers.mdx index 9b4c07b665d3..36c38a58bd62 100644 --- a/packages/web/src/content/docs/it/providers.mdx +++ b/packages/web/src/content/docs/it/providers.mdx @@ -305,6 +305,8 @@ Oppure se hai già una chiave API, puoi selezionare **Manually enter API Key** e Se incontri errori "I'm sorry, but I cannot assist with that request", prova a cambiare il filtro contenuti da **DefaultV2** a **Default** nella tua risorsa Azure. ::: +Se usi modelli non-OpenAI o vedi errori, preferisci il provider [Azure Cognitive Services](#azure-cognitive-services) con `@ai-sdk/openai-compatible` e `options.baseURL`; se usi principalmente modelli OpenAI, questo provider va bene. + 1. Vai al [portale Azure](https://portal.azure.com/) e crea una risorsa **Azure OpenAI**. 
Ti serviranno: - **Resource name**: Diventa parte del tuo endpoint API (`https://RESOURCE_NAME.openai.azure.com/`) - **API key**: O `KEY 1` o `KEY 2` dalla tua risorsa @@ -342,6 +344,21 @@ Se incontri errori "I'm sorry, but I cannot assist with that request", prova a c export AZURE_RESOURCE_NAME=XXX ``` + Oppure sovrascrivilo nella tua configurazione `opencode.json`: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. Esegui il comando `/models` per selezionare il tuo modello deployato. ```txt @@ -389,6 +406,40 @@ Se incontri errori "I'm sorry, but I cannot assist with that request", prova a c export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +Usa la configurazione di override qui sotto per modelli non-OpenAI o errori ricorrenti. +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +La chiave API `/connect` è ancora usata. + +:::tip +Raggiungi i limiti di velocità? Riduci `models..limit.output` per rimanere sotto i limiti di token al minuto di Azure. +::: + 6. Esegui il comando `/models` per selezionare il tuo modello deployato. 
```txt diff --git a/packages/web/src/content/docs/ja/providers.mdx b/packages/web/src/content/docs/ja/providers.mdx index 2602f8ef22ad..0f2b189f7746 100644 --- a/packages/web/src/content/docs/ja/providers.mdx +++ b/packages/web/src/content/docs/ja/providers.mdx @@ -299,6 +299,8 @@ Pro/Max サブスクリプションをお持ちでない場合は、[**API キ 「申し訳ありませんが、そのリクエストには対応できません」エラーが発生した場合は、Azure リソースのコンテンツフィルターを **DefaultV2** から **Default** に変更してみてください。 ::: +OpenAI 以外のモデルを使用している場合やエラーが表示される場合は、`@ai-sdk/openai-compatible` と `options.baseURL` を使用する [Azure Cognitive Services](#azure-cognitive-services) プロバイダーを推奨します。主に OpenAI モデルを使用している場合は、このプロバイダーで問題ありません。 + 1. [Azure portal](https://portal.azure.com/) に移動し、**Azure OpenAI** リソースを作成します。必要なものは次のとおりです。 - **リソース名**: これは API エンドポイント (`https://RESOURCE_NAME.openai.azure.com/`) の一部になります。 - **API キー**: リソースの `KEY 1` または `KEY 2` のいずれか @@ -333,7 +335,22 @@ OpenCode が正しく動作するには、デプロイメント名がモデル または、bash プロファイルに追加します。 ```bash title="~/.bash_profile" - export AZURE_RESOURCE_NAME=XXX +export AZURE_RESOURCE_NAME=XXX +``` + +または、`opencode.json` 設定で上書きします: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } +} ``` 6. 
`/models` コマンドを実行して、デプロイされたモデルを選択します。 @@ -380,9 +397,43 @@ OpenCode が正しく動作するには、デプロイメント名がモデル または、bash プロファイルに追加します。 ```bash title="~/.bash_profile" - export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX +export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +OpenAI 以外のモデルや繰り返し発生するエラーについては、以下のオーバーライド設定を使用してください。 +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +`/connect` API キーは引き続き使用されます。 + +:::tip +レート制限に達しましたか? `models..limit.output` を減らして、Azure の 1 分あたりのトークン上限を下回るようにしてください。 +::: + 6. `/models` コマンドを実行して、デプロイされたモデルを選択します。 ```txt diff --git a/packages/web/src/content/docs/ko/providers.mdx b/packages/web/src/content/docs/ko/providers.mdx index ea48dbfb0a9a..12481bf084d2 100644 --- a/packages/web/src/content/docs/ko/providers.mdx +++ b/packages/web/src/content/docs/ko/providers.mdx @@ -303,6 +303,8 @@ Pro/Max 구독이 없는 경우 **Create an API Key**를 선택할 수 있습니 "I'm sorry, but I can't support that request" 오류가 발생하면, Azure 리소스의 콘텐츠 필터를 **DefaultV2**에서 **Default**로 변경해 보세요. ::: +OpenAI가 아닌 모델을 사용하거나 오류가 표시되는 경우 `@ai-sdk/openai-compatible` 및 `options.baseURL`이 포함된 [Azure Cognitive Services](#azure-cognitive-services) 공급자를 선호하세요. 주로 OpenAI 모델을 사용하는 경우 이 공급자가 좋습니다. + 1. [Azure 포털](https://portal.azure.com/)로 이동하여 **Azure OpenAI** 리소스를 만듭니다. 
다음이 필요합니다: - **리소스 이름**: API 엔드포인트의 일부가 됩니다 (`https://RESOURCE_NAME.openai.azure.com/`) - **API 키**: 리소스의 `KEY 1` 또는 `KEY 2` @@ -340,6 +342,21 @@ Pro/Max 구독이 없는 경우 **Create an API Key**를 선택할 수 있습니 export AZURE_RESOURCE_NAME=XXX ``` + 또는 `opencode.json` 구성에서 재정의하세요: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. `/models` 명령을 실행하여 배포된 모델을 선택하십시오. ```txt @@ -387,6 +404,40 @@ Pro/Max 구독이 없는 경우 **Create an API Key**를 선택할 수 있습니 export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +OpenAI가 아닌 모델이나 반복되는 오류에는 아래의 재정의 구성을 사용하십시오. +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +`/connect` API 키는 여전히 사용됩니다. + +:::tip +속도 제한에 도달했습니까? Azure 분당 토큰 한도 미만으로 유지하려면 `models..limit.output`을 줄이십시오. +::: + 6. `/models` 명령을 실행하여 배포된 모델을 선택하십시오. ```txt diff --git a/packages/web/src/content/docs/nb/providers.mdx b/packages/web/src/content/docs/nb/providers.mdx index 58d325cab8c9..5cf785f0b0c8 100644 --- a/packages/web/src/content/docs/nb/providers.mdx +++ b/packages/web/src/content/docs/nb/providers.mdx @@ -296,6 +296,8 @@ Eller hvis du allerede har en API-nøkkel, kan du velge **Angi API-nøkkel manue Hvis du støter på «Beklager, men jeg kan ikke hjelpe med den forespørselen»-feil, kan du prøve å endre innholdsfilteret fra **DefaultV2** til **Default** i Azure-ressursen. 
::: +Hvis du bruker ikke-OpenAI-modeller eller ser feil, foretrekk [Azure Cognitive Services](#azure-cognitive-services)-leverandøren med `@ai-sdk/openai-compatible` og `options.baseURL`; hvis du hovedsakelig bruker OpenAI-modeller, er denne leverandøren grei. + 1. Gå over til [Azure-portalen](https://portal.azure.com/) og lag en **Azure OpenAI**-ressurs. Du trenger: - **Ressursnavn**: Dette blir en del av API-endepunktet (`https://RESOURCE_NAME.openai.azure.com/`) - **API nøkkel**: Enten `KEY 1` eller `KEY 2` fra ressursen din @@ -333,6 +335,21 @@ Hvis du støter på «Beklager, men jeg kan ikke hjelpe med den forespørselen» export AZURE_RESOURCE_NAME=XXX ``` + Eller overstyr den i din `opencode.json`-konfigurasjon: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. Kjør kommandoen `/models` for å velge den distribuerte modellen. ```txt @@ -380,6 +397,40 @@ Hvis du støter på «Beklager, men jeg kan ikke hjelpe med den forespørselen» export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +Bruk overstyringskonfigurasjonen nedenfor for ikke-OpenAI-modeller eller tilbakevendende feil. +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +`/connect` API-nøkkelen brukes fortsatt. + +:::tip +Når du hastighetsgrenser? Reduser `models..limit.output` for å holde deg under Azure token-per-minutt-grenser. +::: + 6. 
Kjør kommandoen `/models` for å velge den distribuerte modellen. ```txt diff --git a/packages/web/src/content/docs/pl/providers.mdx b/packages/web/src/content/docs/pl/providers.mdx index 58f824764efa..0044cae1b5f3 100644 --- a/packages/web/src/content/docs/pl/providers.mdx +++ b/packages/web/src/content/docs/pl/providers.mdx @@ -296,6 +296,8 @@ Lub jeśli masz już klucz API, możesz wybrać **Wprowadź klucz API ręcznie** Jeśli napotkasz błędy „Przykro mi, ale nie mogę pomóc w tej prośbie”, spróbuj zmienić filtr zawartości z **DefaultV2** na **Default** w zasobie platformy Azure. ::: +Jeśli używasz modeli innych niż OpenAI lub widzisz błędy, preferuj dostawcę [Azure Cognitive Services](#azure-cognitive-services) z `@ai-sdk/openai-compatible` i `options.baseURL`; jeśli używasz głównie modeli OpenAI, ten dostawca jest w porządku. + 1. Przejdź do [Azure portal](https://portal.azure.com/) i utwórz zasób **Azure OpenAI**. Będziesz potrzebować: - **Nazwa zasobu**: staje się częścią punktu końcowego API (`https://RESOURCE_NAME.openai.azure.com/`) - **Klucz API**: `KEY 1` lub `KEY 2` z Twojego zasobu @@ -333,6 +335,21 @@ Jeśli napotkasz błędy „Przykro mi, ale nie mogę pomóc w tej prośbie”, export AZURE_RESOURCE_NAME=XXX ``` + Lub nadpisz go w swojej konfiguracji `opencode.json`: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. Uruchom komendę `/models`, aby wybrać wdrożony model. ```txt @@ -380,6 +397,40 @@ Jeśli napotkasz błędy „Przykro mi, ale nie mogę pomóc w tej prośbie”, export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +Użyj poniższej konfiguracji nadpisania dla modeli innych niż OpenAI lub powtarzających się błędów. 
+::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +Klucz API `/connect` jest nadal używany. + +:::tip +Osiągasz limity szybkości? Zmniejsz `models..limit.output`, aby pozostać poniżej limitów tokenów na minutę Azure. +::: + 6. Uruchom komendę `/models`, aby wybrać wdrożony model. ```txt diff --git a/packages/web/src/content/docs/providers.mdx b/packages/web/src/content/docs/providers.mdx index 132126f4e546..31003ecac0fd 100644 --- a/packages/web/src/content/docs/providers.mdx +++ b/packages/web/src/content/docs/providers.mdx @@ -305,6 +305,8 @@ Or if you already have an API key, you can select **Manually enter API Key** and If you encounter "I'm sorry, but I cannot assist with that request" errors, try changing the content filter from **DefaultV2** to **Default** in your Azure resource. ::: +If you use non-OpenAI models or see errors, prefer the [Azure Cognitive Services](#azure-cognitive-services) provider with `@ai-sdk/openai-compatible` and `options.baseURL`; if you mostly use OpenAI models, this provider is fine. + 1. Head over to the [Azure portal](https://portal.azure.com/) and create an **Azure OpenAI** resource. 
You'll need: - **Resource name**: This becomes part of your API endpoint (`https://RESOURCE_NAME.openai.azure.com/`) - **API key**: Either `KEY 1` or `KEY 2` from your resource @@ -342,7 +344,7 @@ If you encounter "I'm sorry, but I cannot assist with that request" errors, try export AZURE_RESOURCE_NAME=XXX ``` - Or set it in your `opencode.json` config: + Or override it in your `opencode.json` config: ```json title="opencode.json" { @@ -404,20 +406,39 @@ If you encounter "I'm sorry, but I cannot assist with that request" errors, try export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` - Or set it in your `opencode.json` config: +:::note +Use the override config below for non-OpenAI models or recurring errors. +::: - ```json title="opencode.json" - { - "$schema": "https://opencode.ai/config.json", - "provider": { - "azure-cognitive-services": { - "options": { - "resourceName": "xxx" - } - } - } - } - ``` +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://RESOURCE_NAME.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +The `/connect` API key is still used. + +:::tip +Hitting rate limits? Reduce `models.MODEL_NAME.limit.output` to stay under Azure token-per-minute caps. +::: 6. Run the `/models` command to select your deployed model. 
diff --git a/packages/web/src/content/docs/pt-br/providers.mdx b/packages/web/src/content/docs/pt-br/providers.mdx index 43f2e385f133..d630b35d7651 100644 --- a/packages/web/src/content/docs/pt-br/providers.mdx +++ b/packages/web/src/content/docs/pt-br/providers.mdx @@ -290,6 +290,8 @@ Ou, se você já tiver uma chave da API, pode selecionar **Inserir manualmente a Se você encontrar erros "Desculpe, mas não posso ajudar com esse pedido", tente mudar o filtro de conteúdo de **DefaultV2** para **Default** em seu recurso Azure. ::: +Se você usar modelos que não sejam da OpenAI ou encontrar erros, prefira o provedor [Azure Cognitive Services](#azure-cognitive-services) com `@ai-sdk/openai-compatible` e `options.baseURL`; se você usar principalmente modelos da OpenAI, este provedor está bem. + 1. Acesse o [portal Azure](https://portal.azure.com/) e crie um recurso **Azure OpenAI**. Você precisará: - **Nome do recurso**: Isso se torna parte do seu endpoint da API (`https://RESOURCE_NAME.openai.azure.com/`) - **Chave da API**: Seja `KEY 1` ou `KEY 2` do seu recurso @@ -327,6 +329,21 @@ Se você encontrar erros "Desculpe, mas não posso ajudar com esse pedido", tent export AZURE_RESOURCE_NAME=XXX ``` + Ou substitua na sua configuração `opencode.json`: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. Execute o comando `/models` para selecionar seu modelo implantado. ```txt @@ -374,6 +391,40 @@ Se você encontrar erros "Desculpe, mas não posso ajudar com esse pedido", tent export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +Use a configuração de substituição abaixo para modelos que não sejam da OpenAI ou erros recorrentes. 
+::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +A chave da API `/connect` ainda é usada. + +:::tip +Atingindo limites de taxa? Reduza `models..limit.output` para ficar abaixo dos limites de token por minuto do Azure. +::: + 6. Execute o comando `/models` para selecionar seu modelo implantado. ```txt diff --git a/packages/web/src/content/docs/ru/providers.mdx b/packages/web/src/content/docs/ru/providers.mdx index 5984c89f43bc..f78d578cc30a 100644 --- a/packages/web/src/content/docs/ru/providers.mdx +++ b/packages/web/src/content/docs/ru/providers.mdx @@ -295,6 +295,8 @@ OpenCode Zen — это список моделей, предоставленн Если вы столкнулись с ошибками «Извините, но я не могу помочь с этим запросом», попробуйте изменить фильтр содержимого с **DefaultV2** на **Default** в своем ресурсе Azure. ::: +Если вы используете модели, отличные от OpenAI, или видите ошибки, отдайте предпочтение провайдеру [Azure Cognitive Services](#azure-cognitive-services) с `@ai-sdk/openai-compatible` и `options.baseURL`; если вы в основном используете модели OpenAI, этот провайдер подойдет. + 1. Перейдите на [портал Azure](https://portal.azure.com/) и создайте ресурс **Azure OpenAI**. Вам понадобится: - **Имя ресурса**: оно становится частью вашей конечной точки API (`https://RESOURCE_NAME.openai.azure.com/`). - **Ключ API**: `KEY 1` или `KEY 2` из вашего ресурса. 
@@ -332,6 +334,21 @@ OpenCode Zen — это список моделей, предоставленн export AZURE_RESOURCE_NAME=XXX ``` + Или переопределите его в конфигурации `opencode.json`: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. Запустите команду `/models`, чтобы выбрать развернутую модель. ```txt @@ -379,6 +396,40 @@ OpenCode Zen — это список моделей, предоставленн export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +Используйте конфигурацию переопределения ниже для моделей, отличных от OpenAI, или повторяющихся ошибок. +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +Ключ API `/connect` все еще используется. + +:::tip +Достигаете лимитов скорости? Уменьшите `models..limit.output`, чтобы оставаться ниже лимитов токенов в минуту Azure. +::: + 6. Запустите команду `/models`, чтобы выбрать развернутую модель. 
```txt diff --git a/packages/web/src/content/docs/th/providers.mdx b/packages/web/src/content/docs/th/providers.mdx index e9fbb351d7df..73330e601ff3 100644 --- a/packages/web/src/content/docs/th/providers.mdx +++ b/packages/web/src/content/docs/th/providers.mdx @@ -296,6 +296,8 @@ OpenCode Zen คือรายชื่อโมเดลที่จัดท หากคุณพบข้อผิดพลาด "ฉันขอโทษ แต่ฉันไม่สามารถช่วยเหลือคำขอนั้นได้" ให้ลองเปลี่ยนตัวกรองเนื้อหาจาก **DefaultV2** เป็น **Default** ในทรัพยากร Azure ของคุณ ::: +หากคุณใช้โมเดลที่ไม่ใช่ OpenAI หรือพบข้อผิดพลาด ให้เลือกผู้ให้บริการ [Azure Cognitive Services](#azure-cognitive-services) ที่มี `@ai-sdk/openai-compatible` และ `options.baseURL` หากคุณใช้โมเดล OpenAI เป็นหลัก ผู้ให้บริการรายนี้ก็ใช้ได้ + 1. ไปที่ [พอร์ทัล Azure](https://portal.azure.com/) และสร้างทรัพยากร **Azure OpenAI** คุณจะต้องการ: - **ชื่อทรัพยากร**: นี่จะกลายเป็นส่วนหนึ่งของจุดสิ้นสุด API ของคุณ (`https://RESOURCE_NAME.openai.azure.com/`) - **API key**: `KEY 1` หรือ `KEY 2` จากทรัพยากรของคุณ @@ -333,6 +335,21 @@ OpenCode Zen คือรายชื่อโมเดลที่จัดท export AZURE_RESOURCE_NAME=XXX ``` + หรือแทนที่ในการกำหนดค่า `opencode.json` ของคุณ: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. 
รันคำสั่ง `/models` เพื่อเลือกโมเดลที่ปรับใช้ของคุณ ```txt @@ -380,6 +397,40 @@ OpenCode Zen คือรายชื่อโมเดลที่จัดท export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +ใช้การกำหนดค่าแทนที่ด้านล่างสำหรับโมเดลที่ไม่ใช่ OpenAI หรือข้อผิดพลาดที่เกิดซ้ำ +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +คีย์ API `/connect` ยังคงถูกใช้งานอยู่ + +:::tip +ถึงขีดจำกัดอัตราแล้วใช่ไหม? ลด `models..limit.output` เพื่อให้อยู่ภายใต้ขีดจำกัดโทเค็นต่อนาทีของ Azure +::: + 6. รันคำสั่ง `/models` เพื่อเลือกโมเดลที่ปรับใช้ของคุณ ```txt diff --git a/packages/web/src/content/docs/tr/providers.mdx b/packages/web/src/content/docs/tr/providers.mdx index efe5ff9afa48..cde76ed8ae90 100644 --- a/packages/web/src/content/docs/tr/providers.mdx +++ b/packages/web/src/content/docs/tr/providers.mdx @@ -298,6 +298,8 @@ Veya zaten bir API anahtarınız varsa **API Anahtarını Manuel Olarak Girin** "Üzgünüm ama bu isteğe yardımcı olamıyorum" hatalarıyla karşılaşırsanız Azure kaynağınızda içerik filtresini **DefaultV2** yerine **Default** olarak değiştirmeyi deneyin. ::: +OpenAI olmayan modeller kullanıyorsanız veya hatalar görüyorsanız, `@ai-sdk/openai-compatible` ve `options.baseURL` ile [Azure Cognitive Services](#azure-cognitive-services) sağlayıcısını tercih edin; çoğunlukla OpenAI modelleri kullanıyorsanız, bu sağlayıcı iyidir. + 1. [Azure portal](https://portal.azure.com/)'a gidin ve bir **Azure OpenAI** kaynağı oluşturun. 
İhtiyacınız olacak: - **Kaynak adı**: Bu, API bitiş noktanızın (`https://RESOURCE_NAME.openai.azure.com/`) parçası olur - **API anahtarı**: Kaynağınızdan `KEY 1` veya `KEY 2` @@ -335,6 +337,21 @@ Veya zaten bir API anahtarınız varsa **API Anahtarını Manuel Olarak Girin** export AZURE_RESOURCE_NAME=XXX ``` + Veya `opencode.json` yapılandırmanızda geçersiz kılın: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. Dağıtılan modelinizi seçmek için `/models` komutunu çalıştırın. ```txt @@ -382,6 +399,40 @@ Veya zaten bir API anahtarınız varsa **API Anahtarını Manuel Olarak Girin** export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +OpenAI olmayan modeller veya tekrarlayan hatalar için aşağıdaki geçersiz kılma yapılandırmasını kullanın. +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +`/connect` API anahtarı hala kullanılıyor. + +:::tip +Hız sınırlarına mı ulaşıyorsunuz? Azure dakika başına token sınırlarının altında kalmak için `models..limit.output` değerini azaltın. +::: + 6. Dağıtılan modelinizi seçmek için `/models` komutunu çalıştırın. 
```txt diff --git a/packages/web/src/content/docs/zh-cn/providers.mdx b/packages/web/src/content/docs/zh-cn/providers.mdx index ccc2bf7d406b..fbe98c4aac53 100644 --- a/packages/web/src/content/docs/zh-cn/providers.mdx +++ b/packages/web/src/content/docs/zh-cn/providers.mdx @@ -291,6 +291,8 @@ OpenCode Zen 是由 OpenCode 团队提供的模型列表,这些模型已经过 如果遇到 "I'm sorry, but I cannot assist with that request" 错误,请尝试将 Azure 资源中的内容过滤器从 **DefaultV2** 更改为 **Default**。 ::: +如果您使用非 OpenAI 模型或遇到错误,请优先使用 [Azure Cognitive Services](#azure-cognitive-services) 提供商,并配合 `@ai-sdk/openai-compatible` 和 `options.baseURL`;如果您主要使用 OpenAI 模型,则此提供商即可。 + 1. 前往 [Azure 门户](https://portal.azure.com/)并创建 **Azure OpenAI** 资源。你需要: - **资源名称**:这会成为你的 API 端点的一部分(`https://RESOURCE_NAME.openai.azure.com/`) - **API 密钥**:资源中的 `KEY 1` 或 `KEY 2` @@ -328,6 +330,21 @@ OpenCode Zen 是由 OpenCode 团队提供的模型列表,这些模型已经过 export AZURE_RESOURCE_NAME=XXX ``` + 或者在您的 `opencode.json` 配置中覆盖它: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. 执行 `/models` 命令选择你已部署的模型。 ```txt @@ -375,6 +392,40 @@ OpenCode Zen 是由 OpenCode 团队提供的模型列表,这些模型已经过 export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +对于非 OpenAI 模型或反复出现的错误,请使用以下覆盖配置。 +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +`/connect` API 密钥仍会被使用。 + +:::tip +遇到速率限制?降低 `models..limit.output` 以保持在 Azure 每分钟令牌数上限之下。 +::: + 6. 
执行 `/models` 命令选择你已部署的模型。 ```txt diff --git a/packages/web/src/content/docs/zh-tw/providers.mdx b/packages/web/src/content/docs/zh-tw/providers.mdx index 12c4ded4e368..a7245664e64f 100644 --- a/packages/web/src/content/docs/zh-tw/providers.mdx +++ b/packages/web/src/content/docs/zh-tw/providers.mdx @@ -291,6 +291,8 @@ OpenCode Zen 是由 OpenCode 團隊提供的模型列表,這些模型已經過 如果遇到 "I'm sorry, but I cannot assist with that request" 錯誤,請嘗試將 Azure 資源中的內容篩選器從 **DefaultV2** 更改為 **Default**。 ::: +如果您使用非 OpenAI 模型或看到錯誤,請優先使用 [Azure Cognitive Services](#azure-cognitive-services) 提供商,並搭配 `@ai-sdk/openai-compatible` 和 `options.baseURL`;如果您主要使用 OpenAI 模型,則此提供商即可。 + 1. 前往 [Azure 入口網站](https://portal.azure.com/)並建立 **Azure OpenAI** 資源。您需要: - **資源名稱**:這會成為您的 API 端點的一部分(`https://RESOURCE_NAME.openai.azure.com/`) - **API 金鑰**:資源中的 `KEY 1` 或 `KEY 2` @@ -328,6 +330,21 @@ OpenCode Zen 是由 OpenCode 團隊提供的模型列表,這些模型已經過 export AZURE_RESOURCE_NAME=XXX ``` + 或者在您的 `opencode.json` 設定中覆蓋它: + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure": { + "options": { + "resourceName": "xxx" + } + } + } + } + ``` + 6. 執行 `/models` 指令選擇您已部署的模型。 ```txt @@ -375,6 +392,40 @@ OpenCode Zen 是由 OpenCode 團隊提供的模型列表,這些模型已經過 export AZURE_COGNITIVE_SERVICES_RESOURCE_NAME=XXX ``` +:::note +對於非 OpenAI 模型或反覆出現的錯誤,請使用以下覆蓋設定。 +::: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "azure-cognitive-services": { + "npm": "@ai-sdk/openai-compatible", + "options": { + "baseURL": "https://.openai.azure.com/openai/v1", + "useCompletionUrls": true // If getting "This model is not supported by Responses API" error + }, + "models": { + "kimi-k2.5": { + "id": "Kimi-K2.5", + "limit": { + "output": 10000, // Reduce limit.output for models where you get rate-limit errors + "context": 262144 + } + } + } + } + } +} +``` + +`/connect` API 金鑰仍會被使用。 + +:::tip +遇到速率限制?降低 `models..limit.output` 以保持在 Azure 每分鐘權杖上限之下。 +::: + 6. 
執行 `/models` 指令選擇您已部署的模型。 ```txt