 import requests
 
 
-__version__ = "0.1.0"
+__version__ = "0.1.2"
 __all__ = [
     "SimStudioClient",
     "SimStudioError",
@@ -64,15 +64,6 @@ class RateLimitInfo:
     retry_after: Optional[int] = None
 
 
-@dataclass
-class RateLimitStatus:
-    """Rate limit status for sync/async requests."""
-    is_limited: bool
-    limit: int
-    remaining: int
-    reset_at: str
-
-
 @dataclass
 class UsageLimits:
     """Usage limits and quota information."""
@@ -115,7 +106,6 @@ def _convert_files_to_base64(self, value: Any) -> Any:
         Recursively processes nested dicts and lists.
         """
         import base64
-        import io
 
         # Check if this is a file-like object
         if hasattr(value, 'read') and callable(value.read):
@@ -159,7 +149,8 @@ def _convert_files_to_base64(self, value: Any) -> Any:
     def execute_workflow(
         self,
         workflow_id: str,
-        input_data: Optional[Dict[str, Any]] = None,
+        input: Optional[Any] = None,
+        *,
         timeout: float = 30.0,
         stream: Optional[bool] = None,
         selected_outputs: Optional[list] = None,
@@ -169,11 +160,13 @@ def execute_workflow(
         Execute a workflow with optional input data.
         If async_execution is True, returns immediately with a task ID.
 
-        File objects in input_data will be automatically detected and converted to base64.
+        File objects in input will be automatically detected and converted to base64.
 
         Args:
             workflow_id: The ID of the workflow to execute
-            input_data: Input data to pass to the workflow (can include file-like objects)
+            input: Input data to pass to the workflow. Can be a dict (spread at root level),
+                primitive value (string, number, bool), or list (wrapped in 'input' field).
+                File-like objects within dicts are automatically converted to base64.
             timeout: Timeout in seconds (default: 30.0)
             stream: Enable streaming responses (default: None)
             selected_outputs: Block outputs to stream (e.g., ["agent1.content"])
@@ -193,8 +186,15 @@ def execute_workflow(
             headers['X-Execution-Mode'] = 'async'
 
         try:
-            # Build JSON body - spread input at root level, then add API control parameters
-            body = input_data.copy() if input_data is not None else {}
+            # Build JSON body - spread dict inputs at root level, wrap primitives/lists in 'input' field
+            body = {}
+            if input is not None:
+                if isinstance(input, dict):
+                    # Dict input: spread at root level (matches curl/API behavior)
+                    body = input.copy()
+                else:
+                    # Primitive or list input: wrap in 'input' field
+                    body = {'input': input}
 
             # Convert any file objects in the input to base64 format
             body = self._convert_files_to_base64(body)
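For illustration, a rough usage sketch of the new input handling above; the import path, API key, and workflow ID are placeholders, not taken from this diff:

    from simstudio import SimStudioClient  # import path assumed

    client = SimStudioClient(api_key="sim-...")  # placeholder key

    # Dict input: keys are spread at the root of the JSON body.
    client.execute_workflow("my-workflow-id", {"query": "hello"})

    # Primitive or list input: wrapped as {"input": ...} in the body.
    client.execute_workflow("my-workflow-id", "hello")
    client.execute_workflow("my-workflow-id", [1, 2, 3])

    # File-like objects inside a dict are converted to base64 by _convert_files_to_base64.
    with open("report.pdf", "rb") as f:
        client.execute_workflow("my-workflow-id", {"document": f})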
@@ -320,20 +320,18 @@ def validate_workflow(self, workflow_id: str) -> bool:
     def execute_workflow_sync(
         self,
         workflow_id: str,
-        input_data: Optional[Dict[str, Any]] = None,
+        input: Optional[Any] = None,
+        *,
         timeout: float = 30.0,
         stream: Optional[bool] = None,
         selected_outputs: Optional[list] = None
     ) -> WorkflowExecutionResult:
         """
-        Execute a workflow and poll for completion (useful for long-running workflows).
-
-        Note: Currently, the API is synchronous, so this method just calls execute_workflow.
-        In the future, if async execution is added, this method can be enhanced.
+        Execute a workflow synchronously (ensures non-async mode).
 
         Args:
             workflow_id: The ID of the workflow to execute
-            input_data: Input data to pass to the workflow (can include file-like objects)
+            input: Input data to pass to the workflow (can include file-like objects)
             timeout: Timeout for the initial request in seconds
             stream: Enable streaming responses (default: None)
             selected_outputs: Block outputs to stream (e.g., ["agent1.content"])
@@ -344,9 +342,14 @@ def execute_workflow_sync(
         Raises:
             SimStudioError: If the workflow execution fails
         """
-        # For now, the API is synchronous, so we just execute directly
-        # In the future, if async execution is added, this method can be enhanced
-        return self.execute_workflow(workflow_id, input_data, timeout, stream, selected_outputs)
+        return self.execute_workflow(
+            workflow_id,
+            input,
+            timeout=timeout,
+            stream=stream,
+            selected_outputs=selected_outputs,
+            async_execution=False
+        )
 
     def set_api_key(self, api_key: str) -> None:
         """
@@ -410,7 +413,8 @@ def get_job_status(self, task_id: str) -> Dict[str, Any]:
     def execute_with_retry(
         self,
         workflow_id: str,
-        input_data: Optional[Dict[str, Any]] = None,
+        input: Optional[Any] = None,
+        *,
         timeout: float = 30.0,
         stream: Optional[bool] = None,
         selected_outputs: Optional[list] = None,
@@ -425,7 +429,7 @@ def execute_with_retry(
 
         Args:
             workflow_id: The ID of the workflow to execute
-            input_data: Input data to pass to the workflow (can include file-like objects)
+            input: Input data to pass to the workflow (can include file-like objects)
             timeout: Timeout in seconds
             stream: Enable streaming responses
             selected_outputs: Block outputs to stream
@@ -448,11 +452,11 @@ def execute_with_retry(
             try:
                 return self.execute_workflow(
                     workflow_id,
-                    input_data,
-                    timeout,
-                    stream,
-                    selected_outputs,
-                    async_execution
+                    input,
+                    timeout=timeout,
+                    stream=stream,
+                    selected_outputs=selected_outputs,
+                    async_execution=async_execution
                 )
             except SimStudioError as e:
                 if e.code != 'RATE_LIMIT_EXCEEDED':
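Finally, a hedged sketch of execute_with_retry, which forwards the same options as keyword arguments and, per the hunk above, re-raises any SimStudioError whose code is not 'RATE_LIMIT_EXCEEDED'. Retry-specific parameters are not shown in this diff and are omitted here:

    try:
        result = client.execute_with_retry(
            "my-workflow-id",
            {"query": "hello"},
            timeout=60.0,
        )
    except SimStudioError as e:
        # Raised when retries are exhausted or the error is not a rate limit.
        print(e.code, e)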