From d20431263b1c1382a35f4f37093b880aeb3417ce Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E4=B9=9D=E6=99=A8?=
Date: Thu, 20 Nov 2025 15:37:06 +0800
Subject: [PATCH] fix top k

---
 aworld/models/openai_provider.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/aworld/models/openai_provider.py b/aworld/models/openai_provider.py
index b4d9b7093..5d8e7a039 100644
--- a/aworld/models/openai_provider.py
+++ b/aworld/models/openai_provider.py
@@ -473,7 +473,7 @@ def get_openai_params(self,
             "frequency_penalty", "logit_bias", "logprobs", "top_logprobs",
             "presence_penalty", "response_format", "seed", "stream", "top_p",
             "user", "function_call", "functions", "tools", "tool_choice", "metadata",
-            "prompt_cache_key", "safety_identifier", "store", "verbosity", "extra_body"
+            "prompt_cache_key", "safety_identifier", "store", "verbosity", "extra_body", "top_k"
         ]
 
         llm_params = self.kwargs.get("params", {})
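
For context, here is a minimal standalone sketch, not the actual aworld implementation, of how a parameter whitelist like the one patched above filters caller-supplied kwargs before the request is built. The names SUPPORTED_OPENAI_PARAMS and filter_openai_params are illustrative assumptions, and the list only repeats the entries visible in the diff; the real list in openai_provider.py contains more parameters.

# Minimal sketch, assuming the provider drops any kwarg not in its whitelist
# (names below are hypothetical, not the actual aworld identifiers).
SUPPORTED_OPENAI_PARAMS = [
    "frequency_penalty", "logit_bias", "logprobs", "top_logprobs",
    "presence_penalty", "response_format", "seed", "stream", "top_p",
    "user", "function_call", "functions", "tools", "tool_choice", "metadata",
    "prompt_cache_key", "safety_identifier", "store", "verbosity", "extra_body", "top_k",
]

def filter_openai_params(params: dict) -> dict:
    """Keep only the keys the provider is willing to forward to the backend."""
    return {k: v for k, v in params.items() if k in SUPPORTED_OPENAI_PARAMS}

if __name__ == "__main__":
    # Before this patch, "top_k" was absent from the whitelist and silently dropped.
    kwargs = {"top_p": 0.9, "top_k": 40, "unsupported_option": 1}
    print(filter_openai_params(kwargs))  # {'top_p': 0.9, 'top_k': 40}

Since the official OpenAI Chat Completions API does not accept top_k, whitelisting it is presumably aimed at OpenAI-compatible backends (for example vLLM-style servers) that do support it.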