
Commit 16f821c

[BUZZOK-27550] Bump GenAI Agents image dependencies and scripts (#1654)
* Update GenAI agents dependencies and scripts
* delint
* Reconcile dependencies, updated IDs, tags
* Undo
* Redo requirements.txt
* Reconcile dependencies, updated IDs, tags

Co-authored-by: svc-harness-git2 <svc-harness-git2@datarobot.com>
1 parent e7b5d6a commit 16f821c

4 files changed (+29, -90 lines)

public_dropin_environments/python311_genai_agents/env_info.json

Lines changed: 3 additions & 3 deletions
@@ -4,7 +4,7 @@
     "description": "This template environment can be used to create GenAI-powered agents using CrewAI, LangGraph, or Llama-Index. Similar to other drop-in environments, you can either include a .pth artifact or any other code needed to deserialize your model, and optionally a custom.py file. You can also use this environment in codespaces.",
     "programmingLanguage": "python",
     "label": "",
-    "environmentVersionId": "68bf53b6d6572611f9494cdc",
+    "environmentVersionId": "68c049d35fc47911f3d87960",
     "environmentVersionDescription": "",
     "isPublic": true,
     "isDownloadable": true,
@@ -15,8 +15,8 @@
     "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/python311_genai_agents",
     "imageRepository": "env-python-genai-agents",
     "tags": [
-        "v11.2.0-68bf53b6d6572611f9494cdc",
-        "68bf53b6d6572611f9494cdc",
+        "v11.2.0-68c049d35fc47911f3d87960",
+        "68c049d35fc47911f3d87960",
        "v11.2.0-latest"
    ]
}
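The description in this file mentions an optional custom.py alongside the model artifact. For illustration only, here is a minimal sketch of what such a file might look like for a chat-style agent; the hook names (load_model, chat) are assumptions based on DRUM's chat interface and are not defined or changed by this commit.

```python
# Hypothetical custom.py sketch for the python311_genai_agents drop-in environment.
# Hook names (load_model, chat) are assumptions, not part of this commit.
from typing import Any

from openai.types.chat import ChatCompletion
from openai.types.chat.completion_create_params import CompletionCreateParamsBase


def load_model(code_dir: str) -> Any:
    # Build or deserialize the agent here (e.g. a CrewAI crew or a LangGraph graph).
    # The returned object is handed to chat() for every request.
    return {"code_dir": code_dir}


def chat(completion_create_params: CompletionCreateParamsBase, model: Any) -> ChatCompletion:
    # Run the agent on the incoming OpenAI-style request and return an
    # OpenAI-compatible ChatCompletion. This placeholder only shows the shape;
    # a real hook would invoke the agent framework.
    raise NotImplementedError("Replace with the agent's chat handling logic")
```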

public_dropin_environments/python311_genai_agents/requirements.in

Lines changed: 2 additions & 2 deletions
@@ -21,8 +21,8 @@ urllib3>=2.5.0
 
 # GenAI Environment
 click~=8.1.8
-crewai>=0.140.0
-crewai-tools>=0.48.0
+crewai>=0.177.0
+crewai-tools>=0.69.0
 datarobot-drum>=1.16.22
 datarobot-moderations>=11.2.3
 datarobot-mlops>=11.1.0
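The compiled requirements.txt below reflects these new lower bounds. As a side illustration (not part of the commit), a small check like the following could confirm that an installed image satisfies the bumped pins; the use of the packaging library here is an assumption.

```python
# Hypothetical sanity check that an installed environment meets the bumped
# lower bounds from requirements.in. Not part of this commit.
from importlib.metadata import version

from packaging.version import Version

MINIMUM_VERSIONS = {
    "crewai": "0.177.0",
    "crewai-tools": "0.69.0",
    "datarobot-drum": "1.16.22",
}

for package, minimum in MINIMUM_VERSIONS.items():
    installed = Version(version(package))
    if installed < Version(minimum):
        raise RuntimeError(f"{package} {installed} is older than required {minimum}")
    print(f"{package}=={installed} satisfies >= {minimum}")
```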

public_dropin_environments/python311_genai_agents/requirements.txt

Lines changed: 20 additions & 16 deletions
@@ -9,14 +9,14 @@ aioboto3==14.3.0
 aiobotocore[boto3]==2.22.0
 aiofiles==24.1.0
 aiohappyeyeballs==2.6.1
-aiohttp==3.12.14
+aiohttp==3.11.18
 aioitertools==0.12.0
-aiosignal==1.4.0
+aiosignal==1.3.2
 aiosqlite==0.21.0
 alembic==1.15.2
 annotated-types==0.7.0
 annoy==1.17.3
-anthropic==0.49.0
+anthropic==0.66.0
 anyio==4.9.0
 appdirs==1.4.4
 argcomplete==3.6.2
@@ -31,13 +31,14 @@ azure-core==1.34.0
 azure-identity==1.22.0
 azure-storage-blob==12.19.0
 backoff==2.2.1
-banks==2.1.2
+banks==2.2.0
 bcrypt==4.3.0
 beautifulsoup4==4.13.4
 bleach[css]==6.2.0
 blinker==1.9.0
 boto3==1.37.3
 botocore==1.37.3
+browserbase==1.4.0
 build==1.2.2.post1
 cachetools==5.5.2
 certifi==2025.4.26
@@ -50,8 +51,8 @@ cohere==5.15.0
 colorama==0.4.6
 coloredlogs==15.0.1
 comm==0.2.2
-crewai==0.140.0
-crewai-tools==0.48.0
+crewai==0.177.0
+crewai-tools==0.69.0
 cryptography==44.0.3
 dataclasses-json==0.6.7
 datarobot[auth]==3.8.2
@@ -111,7 +112,7 @@ grpc-google-iam-v1==0.14.2
 grpcio==1.71.0
 grpcio-status==1.71.0
 h11==0.16.0
-h2==4.3.0
+h2==4.2.0
 hf-xet==1.1.0
 hpack==4.1.0
 httpcore==1.0.9
@@ -157,7 +158,7 @@ kubernetes==32.0.1
 lancedb==0.22.0
 langchain==0.3.27
 langchain-cohere==0.3.5
-langchain-community==0.3.27
+langchain-community==0.3.29
 langchain-core==0.3.75
 langchain-experimental==0.3.4
 langchain-mcp-adapters==0.1.9
@@ -171,17 +172,17 @@ langgraph-sdk==0.1.66
 langsmith==0.3.45
 lark==1.2.2
 legacy-cgi==2.6.3
-litellm==1.72.6
+litellm==1.74.9
 llama-cloud==0.1.21
 llama-cloud-services==0.6.15
-llama-index==0.12.51
+llama-index==0.12.52
 llama-index-agent-openai==0.4.12
 llama-index-cli==0.4.4
-llama-index-core==0.12.51
+llama-index-core==0.12.52.post1
 llama-index-embeddings-azure-openai==0.3.7
 llama-index-embeddings-openai==0.3.1
 llama-index-indices-managed-llama-cloud==0.6.11
-llama-index-instrumentation==0.3.0
+llama-index-instrumentation==0.2.0
 llama-index-llms-azure-openai==0.3.4
 llama-index-llms-bedrock-converse==0.7.1
 llama-index-llms-langchain==0.6.1
@@ -287,8 +288,9 @@ pdfplumber==0.11.6
 pexpect==4.9.0
 pillow==11.3.0
 platformdirs==4.3.8
+playwright==1.55.0
 pluggy==1.6.0
-portalocker==2.10.1
+portalocker==2.7.0
 posthog==3.25.0
 progress==1.6
 prometheus-client==0.21.1
@@ -307,7 +309,8 @@ pyasn1-modules==0.4.2
 pycparser==2.22
 pydantic==2.11.7
 pydantic-core==2.33.2
-pydantic-settings==2.9.1
+pydantic-settings==2.10.1
+pyee==13.0.0
 pyfiglet==1.0.3
 pygments==2.19.1
 pyjwt[crypto]==2.10.1
@@ -335,7 +338,7 @@ qdrant-client==1.14.2
 ragas @ git+https://github.com/explodinggradients/ragas@5d59549ad5ef511f621502c563bc55ac5aeb9188#subdirectory=ragas
 referencing==0.36.2
 regex==2024.11.6
-requests==2.32.4
+requests==2.32.5
 requests-oauthlib==2.0.0
 requests-toolbelt==1.0.0
 rfc3339-validator==0.1.4
@@ -357,8 +360,9 @@ six==1.17.0
 sniffio==1.3.1
 soupsieve==2.7
 sqlalchemy[asyncio]==2.0.40
-sse-starlette==3.0.2
+sse-starlette==2.4.1
 stack-data==0.6.3
+stagehand==0.5.0
 starlette==0.46.2
 strenum==0.4.15
 strictyaml==1.4.2

public_dropin_environments/python311_genai_agents/run_agent.py

Lines changed: 4 additions & 69 deletions
@@ -22,18 +22,14 @@
 from typing import Any, TextIO, cast
 from urllib.parse import urlparse, urlunparse
 
-import requests
 from datarobot_drum.drum.enum import TargetType
 from datarobot_drum.drum.root_predictors.drum_inline_utils import drum_inline_predictor
-from datarobot_drum.drum.root_predictors.drum_server_utils import DrumServerRun
-from openai import OpenAI
 from openai.types.chat import ChatCompletion
 from openai.types.chat.completion_create_params import (
     CompletionCreateParamsBase,
 )
 from opentelemetry import trace
 from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
-from opentelemetry.propagate import inject
 from opentelemetry.sdk.trace import TracerProvider
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
 from opentelemetry.trace import Span, use_span
@@ -89,11 +85,6 @@ def argparse_args() -> argparse.Namespace:
         default=None,
         help="Custom attributes for tracing. Should be a JSON dictionary.",
     )
-    parser.add_argument(
-        "--use_serverless",
-        action="store_true",
-        help="Use DRUM serverless predictor.",
-    )
     args = parser.parse_args()
     return args
 
@@ -203,55 +194,6 @@ def setup_otel(args: Any) -> Span:
     return span
 
 
-def execute_drum(
-    chat_completion: CompletionCreateParamsBase,
-    default_headers: dict[str, str],
-    custom_model_dir: Path,
-) -> ChatCompletion:
-    root.info("Executing agent as [chat] endpoint. DRUM Executor.")
-    root.info("Starting DRUM server.")
-    with DrumServerRun(
-        target_type=TargetType.AGENTIC_WORKFLOW.value,
-        labels=None,
-        custom_model_dir=custom_model_dir,
-        with_error_server=True,
-        production=False,
-        verbose=True,
-        logging_level="info",
-        target_name="response",
-        wait_for_server_timeout=360,
-        port=get_open_port(),
-        stream_output=True,
-        max_workers=2,  # this will force drum tracing to not use batchprocessor
-    ) as drum_runner:
-        root.info("Verifying DRUM server")
-        response = requests.get(drum_runner.url_server_address)
-        if not response.ok:
-            root.error("Server failed to start")
-            try:
-                root.error(response.text)
-            finally:
-                raise RuntimeError("Server failed to start")
-
-        # inject OTEL headers into default_headers
-        inject(default_headers)
-
-        # Use a standard OpenAI client to call the DRUM server. This mirrors the behavior of a deployed agent.
-        # Using the `chat.completions.create` method ensures the parameters are OpenAI compatible.
-        root.info("Executing Agent")
-        client = OpenAI(
-            base_url=drum_runner.url_server_address,
-            api_key="not-required",
-            default_headers=default_headers,
-            max_retries=0,
-        )
-        completion = client.chat.completions.create(**chat_completion)
-
-    # Continue outside the context manager to ensure the server is stopped and logs
-    # are flushed before we write the output
-    return completion
-
-
 def execute_drum_inline(
     chat_completion: CompletionCreateParamsBase,
     custom_model_dir: Path,
@@ -304,17 +246,10 @@ def run_agent_procedure(args: Any) -> None:
     root.info(f"Trace id: {trace_id}")
 
     root.info(f"Executing request in directory {args.custom_model_dir}")
-    if args.use_serverless:
-        result = execute_drum_inline(
-            chat_completion=chat_completion,
-            custom_model_dir=args.custom_model_dir,
-        )
-    else:
-        result = execute_drum(
-            chat_completion=chat_completion,
-            default_headers=default_headers,
-            custom_model_dir=args.custom_model_dir,
-        )
+    result = execute_drum_inline(
+        chat_completion=chat_completion,
+        custom_model_dir=args.custom_model_dir,
+    )
     store_result(
         result,
         trace_id,
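With execute_drum and the --use_serverless flag removed, every run now goes through the inline predictor. Below is a minimal sketch of the remaining call path, assuming only the signatures visible in this diff; execute_drum_inline's body is unchanged by the commit and is stubbed out here.

```python
# Sketch of the single execution path after this commit; the DrumServerRun /
# OpenAI-client branch no longer exists. Stubs mirror the signatures shown in
# the diff above and are not the full implementations.
from pathlib import Path

from openai.types.chat import ChatCompletion
from openai.types.chat.completion_create_params import CompletionCreateParamsBase


def execute_drum_inline(
    chat_completion: CompletionCreateParamsBase,
    custom_model_dir: Path,
) -> ChatCompletion:
    # Retained function (built on drum_inline_predictor); body omitted here.
    raise NotImplementedError


def run_agent_sketch(
    chat_completion: CompletionCreateParamsBase, custom_model_dir: Path
) -> ChatCompletion:
    # Previously this branched on args.use_serverless; now there is one path.
    return execute_drum_inline(
        chat_completion=chat_completion,
        custom_model_dir=custom_model_dir,
    )
```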
