Skip to content

Commit a5ba919

Browse files
authored
Merge pull request #4 from HelpingAI/copilot/fix-f2325e26-70dc-4a4a-83b9-f7072a4289fe
Fix tool conversion warnings and improve HTTP 400 error guidance
2 parents bf27795 + e914578 commit a5ba919

File tree

3 files changed

+300
-6
lines changed

3 files changed

+300
-6
lines changed

HelpingAI/client.py

Lines changed: 57 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -146,10 +146,31 @@ def _request(
146146
raise InvalidModelError(model_name, response.status_code, response.headers)
147147
else:
148148
raise InvalidModelError("Unknown model", response.status_code, response.headers)
149-
# Add hint for generic 400 errors when not streaming
150-
if not stream and "Request failed with status code" in error_message:
151-
error_message += ". This model or endpoint might require streaming. Try setting stream=True."
152-
raise InvalidRequestError(error_message, status_code=response.status_code, headers=response.headers)
149+
150+
# Enhanced guidance for 400 errors
151+
enhanced_message = error_message
152+
if not stream:
153+
# Check for various indicators that streaming might be required
154+
streaming_indicators = [
155+
"Request failed with status code",
156+
"streaming",
157+
"stream",
158+
"tool", # Some tool-related requests might require streaming
159+
"function" # Function calling might require streaming
160+
]
161+
162+
if any(indicator in error_message.lower() for indicator in streaming_indicators):
163+
enhanced_message += (
164+
". This model or endpoint might require streaming. "
165+
"Try setting stream=True in your request."
166+
)
167+
else:
168+
enhanced_message += (
169+
". If this error persists, try setting stream=True or "
170+
"check your request parameters."
171+
)
172+
173+
raise InvalidRequestError(enhanced_message, status_code=response.status_code, headers=response.headers)
153174
elif response.status_code == 429:
154175
raise TooManyRequestsError(response.status_code, response.headers)
155176
elif response.status_code == 503:
@@ -519,13 +540,43 @@ def _convert_tools_parameter(
519540
return ensure_openai_format(tools)
520541
except ImportError:
521542
# Fallback if tools module not available - treat as legacy format
543+
import warnings
544+
warnings.warn(
545+
"Tools module not available. Install optional dependencies with: pip install 'HelpingAI[mcp]'. "
546+
"Using legacy tool format."
547+
)
522548
if isinstance(tools, list):
523549
return tools
524550
return None
525551
except Exception as e:
526-
# Log warning but don't break existing functionality
552+
# Enhanced error handling with better guidance
527553
import warnings
528-
warnings.warn(f"Tool conversion failed: {e}. Using legacy behavior.")
554+
error_msg = str(e)
555+
556+
# Provide more helpful error messages based on the error type
557+
if "Unknown built-in tool" in error_msg:
558+
available_tools = "code_interpreter, web_search"
559+
warnings.warn(
560+
f"Tool conversion failed: {e}. "
561+
f"Available built-in tools: {available_tools}. "
562+
f"For custom tools, use OpenAI tool format. Using legacy behavior."
563+
)
564+
elif "Unsupported tool item type" in error_msg:
565+
warnings.warn(
566+
f"Tool conversion failed: {e}. "
567+
f"Tools must be strings (built-in tool names), dicts (OpenAI format), "
568+
f"or MCP server configs. Using legacy behavior."
569+
)
570+
elif "Unsupported tools format" in error_msg:
571+
warnings.warn(
572+
f"Tool conversion failed: {e}. "
573+
f"Supported formats: None, string (category), List[Dict] (OpenAI format), "
574+
f"List[str] (built-in tools), or List[Fn]. Using legacy behavior."
575+
)
576+
else:
577+
warnings.warn(f"Tool conversion failed: {e}. Using legacy behavior.")
578+
579+
# Fallback to legacy behavior - return tools as-is if it's a list
529580
if isinstance(tools, list):
530581
return tools
531582
return None

HelpingAI/tools/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
)
2626
from .compatibility import (
2727
ensure_tool_format,
28+
ensure_openai_format,
2829
convert_legacy_tools,
2930
merge_tool_lists,
3031
create_fn_from_tool_dict,
@@ -58,6 +59,7 @@
5859

5960
# Compatibility utilities
6061
"ensure_tool_format",
62+
"ensure_openai_format",
6163
"convert_legacy_tools",
6264
"merge_tool_lists",
6365
"create_fn_from_tool_dict",

examples/troubleshooting_guide.py

Lines changed: 241 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,241 @@
1+
#!/usr/bin/env python3
2+
"""
3+
HelpingAI Tool Usage Examples
4+
5+
This script demonstrates proper usage patterns for the HelpingAI client,
6+
specifically addressing common issues with tool configuration and API requests.
7+
8+
Common Issues Addressed:
9+
1. Tool conversion errors - "Unsupported tools format"
10+
2. HTTP 400 errors suggesting stream=True
11+
3. Proper tool format specifications
12+
"""
13+
14+
import os
15+
import sys
16+
17+
# Add parent directory to path for development
18+
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
19+
20+
from HelpingAI import HAI
21+
22+
def example_built_in_tools():
    """Example: Using built-in tools correctly."""
    print("=== Example 1: Built-in Tools ===")

    client = HAI(api_key=os.getenv("HAI_API_KEY", "your-api-key"))

    # ✅ CORRECT: built-in tools are referenced by their string names.
    builtin_names = ["code_interpreter", "web_search"]

    try:
        client.chat.completions.create(
            model="HelpingAI2.5-10B",
            messages=[{"role": "user", "content": "What's 2+2 and search for Python tutorials?"}],
            tools=builtin_names,
            stream=False,  # Try stream=True if you get HTTP 400 errors
        )
        print("✅ Request successful with built-in tools")

    except Exception as e:
        print(f"❌ Error: {e}")
        # A 400 response that mentions streaming usually means the
        # endpoint wants stream=True.
        err_text = str(e)
        if "400" in err_text and "stream" in err_text.lower():
            print("💡 Tip: Try setting stream=True")
44+
45+
def example_openai_format_tools():
    """Example: Using OpenAI-format tools correctly."""
    print("\n=== Example 2: OpenAI Format Tools ===")

    client = HAI(api_key=os.getenv("HAI_API_KEY", "your-api-key"))

    # ✅ CORRECT: a custom tool expressed in the OpenAI function-tool schema.
    calc_params = {
        "type": "object",
        "properties": {
            "expression": {
                "type": "string",
                "description": "Math expression to evaluate",
            },
        },
        "required": ["expression"],
    }
    calculate_tool = {
        "type": "function",
        "function": {
            "name": "calculate",
            "description": "Perform basic math calculations",
            "parameters": calc_params,
        },
    }

    try:
        client.chat.completions.create(
            model="HelpingAI2.5-10B",
            messages=[{"role": "user", "content": "Calculate 15 * 23"}],
            tools=[calculate_tool],
        )
        print("✅ Request successful with OpenAI format tools")

    except Exception as e:
        print(f"❌ Error: {e}")
82+
83+
def example_mcp_tools():
    """Example: Using MCP (Model Context Protocol) tools correctly."""
    print("\n=== Example 3: MCP Tools ===")

    client = HAI(api_key=os.getenv("HAI_API_KEY", "your-api-key"))

    # ✅ CORRECT: an MCP config maps server names to launch commands.
    mcp_config = {
        'mcpServers': {
            'time': {'command': 'uvx', 'args': ['mcp-server-time']},
            'fetch': {'command': 'uvx', 'args': ['mcp-server-fetch']},
        }
    }

    try:
        client.chat.completions.create(
            model="HelpingAI2.5-10B",
            messages=[{"role": "user", "content": "What time is it?"}],
            tools=[mcp_config],
        )
        print("✅ Request successful with MCP tools")

    except ImportError:
        # MCP support is an optional extra dependency.
        print("❌ MCP dependencies not installed. Run: pip install 'HelpingAI[mcp]'")
    except Exception as e:
        print(f"❌ Error: {e}")
117+
118+
def example_mixed_tools():
    """Example: Mixing different tool types correctly."""
    print("\n=== Example 4: Mixed Tools ===")

    client = HAI(api_key=os.getenv("HAI_API_KEY", "your-api-key"))

    # ✅ CORRECT: built-in names and OpenAI-format dicts can share one list.
    custom_tool = {
        "type": "function",
        "function": {
            "name": "custom_tool",
            "description": "A custom tool",
            "parameters": {"type": "object", "properties": {}},
        },
    }
    mixed_tools = ["code_interpreter", custom_tool]  # Built-in tool + custom tool

    try:
        client.chat.completions.create(
            model="HelpingAI2.5-10B",
            messages=[{"role": "user", "content": "Help me with coding"}],
            tools=mixed_tools,
        )
        print("✅ Request successful with mixed tools")

    except Exception as e:
        print(f"❌ Error: {e}")
147+
148+
def example_streaming_usage():
    """Example: Using streaming to avoid HTTP 400 errors."""
    print("\n=== Example 5: Streaming Usage ===")

    client = HAI(api_key=os.getenv("HAI_API_KEY", "your-api-key"))

    try:
        # If you get HTTP 400 errors, try streaming
        stream = client.chat.completions.create(
            model="HelpingAI2.5-10B",
            messages=[{"role": "user", "content": "Tell me a story"}],
            tools=["web_search"],
            stream=True,  # 🔑 KEY: Enable streaming
        )

        print("✅ Streaming request initiated")

        # Drain the stream, echoing each content delta as it arrives.
        for chunk in stream:
            delta_text = chunk.choices[0].delta.content
            if delta_text:
                print(delta_text, end="")
        print("\n✅ Streaming completed")

    except Exception as e:
        print(f"❌ Error: {e}")
173+
174+
def common_mistakes():
    """Examples of common mistakes to avoid."""
    # Anti-patterns first (each description followed by the offending
    # snippet), then the correct forms — printed verbatim, in order.
    lines = [
        "\n=== Common Mistakes to Avoid ===",
        "❌ DON'T: Use invalid built-in tool names",
        " tools = ['invalid_tool'] # Will cause warnings",
        "❌ DON'T: Use wrong data types for tools",
        " tools = [1, 2, 3] # Will cause warnings",
        "❌ DON'T: Use incorrect tool format",
        " tools = {'not': 'a list'} # Should be a list",
        "\n✅ DO: Use correct formats",
        " tools = ['code_interpreter', 'web_search'] # Built-in tools",
        " tools = [{'type': 'function', ...}] # OpenAI format",
        " tools = [{'mcpServers': {...}}] # MCP format",
    ]
    for line in lines:
        print(line)
195+
196+
def troubleshooting_tips():
    """Troubleshooting tips for common issues."""
    print("\n=== Troubleshooting Tips ===")

    # (header, tips) pairs; headers after the first carry a leading
    # newline so each section is visually separated, matching the
    # original print sequence exactly.
    sections = [
        ("🔧 If you see 'Tool conversion failed' warnings:", [
            " - Check that tool names are correct (code_interpreter, web_search)",
            " - Ensure tools are in proper format (list of strings/dicts)",
            " - For MCP tools, install: pip install 'HelpingAI[mcp]'",
        ]),
        ("\n🔧 If you get HTTP 400 'stream=True' errors:", [
            " - Try setting stream=True in your request",
            " - Some models/endpoints require streaming",
            " - Tool-heavy requests often need streaming",
        ]),
        ("\n🔧 If you get 'Unknown built-in tool' errors:", [
            " - Available built-in tools: code_interpreter, web_search",
            " - For custom tools, use OpenAI format with 'type': 'function'",
        ]),
        ("\n🔧 For MCP tools:", [
            " - Install MCP dependencies: pip install 'HelpingAI[mcp]'",
            " - Ensure MCP servers are properly configured",
            " - Check server commands and arguments",
        ]),
    ]
    for header, tips in sections:
        print(header)
        for tip in tips:
            print(tip)
218+
219+
def main():
    """Run every example in sequence, then print the usage tips."""
    print("HelpingAI Tool Usage Examples")
    print("=" * 40)

    # Warn (but continue) when no API key is configured — the examples
    # still show request structure even if the calls fail.
    if not os.getenv("HAI_API_KEY"):
        print("⚠️ Set HAI_API_KEY environment variable to run actual requests")
        print(" Examples will show structure without making API calls")
        print()

    # Live request examples, in presentation order.
    for demo in (
        example_built_in_tools,
        example_openai_format_tools,
        example_mcp_tools,
        example_mixed_tools,
        example_streaming_usage,
    ):
        demo()

    # Offline guidance: common mistakes and troubleshooting tips.
    common_mistakes()
    troubleshooting_tips()

    print("\n✅ For more examples, see: examples/mcp_example.py")
    print("📚 Documentation: https://helpingai.co/docs")


if __name__ == "__main__":
    main()

0 commit comments

Comments
 (0)