From 1fd40635c7a2761cc4bbf7b529684459fcf7afce Mon Sep 17 00:00:00 2001 From: Teo Date: Mon, 27 Jan 2025 19:45:03 +0200 Subject: [PATCH 01/31] logcapture.py, instrumentation.py, session helpers Signed-off-by: Teo --- agentops/instrumentation.py | 49 ++++ agentops/log_capture.py | 306 +++++++++++++++++++++++++ agentops/session.py | 17 ++ uv.lock | 439 +++++++++++++++++++++++++----------- 4 files changed, 682 insertions(+), 129 deletions(-) create mode 100644 agentops/instrumentation.py create mode 100644 agentops/log_capture.py diff --git a/agentops/instrumentation.py b/agentops/instrumentation.py new file mode 100644 index 000000000..3d7504b7d --- /dev/null +++ b/agentops/instrumentation.py @@ -0,0 +1,49 @@ +from __future__ import annotations + +import logging +import sys +from typing import TYPE_CHECKING, Dict, List, Optional +from uuid import UUID + +from opentelemetry import trace +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import SpanProcessor, TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter +from opentelemetry.sdk.trace.sampling import ParentBased, Sampler, TraceIdRatioBased + +if TYPE_CHECKING: + from opentelemetry.sdk._logs import LoggingHandler + + from agentops.client import Client + + +_log_handler = None + + +def set_log_handler(log_handler: Optional[LoggingHandler]) -> None: + """Set the OTLP log handler. + + Args: + log_handler: The logging handler to use for OTLP + """ + _log_handler = log_handler + + +def get_log_handler() -> Optional[LoggingHandler]: + """Get the current OTLP log handler. + + Returns: + The current logging handler if set, None otherwise + """ + return _log_handler + + +def add_telemetry_log_handler(logger: logging.Logger) -> None: + """Add the OTLP log handler to the given logger if configured. 
+ + Args: + logger: The logger to add the handler to + """ + global _log_handler + if _log_handler: + logger.addHandler(_log_handler) diff --git a/agentops/log_capture.py b/agentops/log_capture.py new file mode 100644 index 000000000..81d16c6f2 --- /dev/null +++ b/agentops/log_capture.py @@ -0,0 +1,306 @@ +import logging +import sys +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any, Dict, List, Optional +from uuid import UUID + +from opentelemetry import trace +from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler +from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, ConsoleLogExporter +from opentelemetry.sdk.resources import Resource + +from agentops.instrumentation import get_log_handler, set_log_handler + +if TYPE_CHECKING: + from agentops.session import Session + + +@dataclass +class LogCapture: + """Captures terminal output for a session using OpenTelemetry logging. + + Integrates with TelemetryManager to use consistent configuration and logging setup. + If no telemetry manager is available, creates a standalone logging setup. 
@dataclass
class LogCapture:
    """Captures terminal output for a session using OpenTelemetry logging.

    Reuses the handler registered via agentops.instrumentation when one
    exists; otherwise builds (and owns) a standalone logger provider with a
    console exporter, and tears it down again on stop().

    Attributes:
        session_id: UUID of the session whose output is captured
        stdout_line_count: Number of lines written to stdout
        stderr_line_count: Number of lines written to stderr
        log_level_counts: Count of log messages by level
        start_time: ISO timestamp when capture started
        end_time: ISO timestamp when capture stopped
        is_capturing: Whether capture is currently active
    """

    session_id: UUID
    stdout_line_count: int = field(default=0)
    stderr_line_count: int = field(default=0)
    log_level_counts: Dict[str, int] = field(
        default_factory=lambda: {"INFO": 0, "WARNING": 0, "ERROR": 0, "DEBUG": 0, "CRITICAL": 0}
    )
    start_time: Optional[str] = field(default=None)
    end_time: Optional[str] = field(default=None)
    is_capturing: bool = field(default=False)

    # Private implementation fields
    _stdout_logger: logging.Logger = field(init=False, repr=False)
    _stderr_logger: logging.Logger = field(init=False, repr=False)
    _stdout: Optional[object] = field(default=None, init=False, repr=False)
    _stderr: Optional[object] = field(default=None, init=False, repr=False)
    _handler: Optional[LoggingHandler] = field(default=None, init=False, repr=False)
    _logger_provider: Optional[LoggerProvider] = field(default=None, init=False, repr=False)
    _owns_handler: bool = field(default=False, init=False, repr=False)
    _session: Optional["Session"] = field(default=None, init=False, repr=False)

    def __post_init__(self):
        """Initialize per-instance loggers after dataclass initialization."""
        # id(self) makes the logger names unique per instance so two captures
        # never share handler state through the logging registry.
        self._stdout_logger = logging.getLogger(f"agentops.stdout.{id(self)}")
        self._stderr_logger = logging.getLogger(f"agentops.stderr.{id(self)}")

        # Configure loggers to not propagate captured output to parent loggers.
        for logger in (self._stdout_logger, self._stderr_logger):
            logger.setLevel(logging.INFO)
            logger.propagate = False
            logger.handlers.clear()

    @property
    def session(self) -> Optional["Session"]:
        """Resolve and cache the Session instance matching ``session_id``.

        Returns None when no active session carries this capture's id.
        """
        if self._session is None:
            # Local import avoids a circular import with agentops.session.
            from agentops.session import get_active_sessions

            for session in get_active_sessions():
                if session.session_id == self.session_id:
                    self._session = session
                    break
        return self._session

    def start(self):
        """Start capturing stdout/stderr through an OTEL logging handler.

        No-op when capture is already running or the session is unknown.
        """
        if self._stdout is not None or not self.session:
            return

        from agentops.helpers import get_ISO_time

        self.start_time = get_ISO_time()
        self.is_capturing = True

        # Try to reuse a handler registered by the telemetry setup.
        # BUG FIX: the original called get_log_handler() but discarded the
        # return value, so a shared handler was never reused and a brand-new
        # provider/handler was built on every start().
        self._handler = get_log_handler()

        # Create our own handler if none exists.
        if not self._handler:
            self._owns_handler = True

            # Use session's resource attributes if available.
            resource_attrs = {"service.name": "agentops", "session.id": str(self.session_id)}

            # Setup logger provider with console exporter.
            resource = Resource.create(resource_attrs)
            self._logger_provider = LoggerProvider(resource=resource)
            exporter = ConsoleLogExporter()
            self._logger_provider.add_log_record_processor(BatchLogRecordProcessor(exporter))

            self._handler = LoggingHandler(
                level=logging.INFO,
                logger_provider=self._logger_provider,
            )

            # Publish the handler so other components can share it.
            set_log_handler(self._handler)

        # Add handler to both loggers.
        self._stdout_logger.addHandler(self._handler)
        self._stderr_logger.addHandler(self._handler)

        # Save original stdout/stderr, then swap in logging proxies.
        self._stdout = sys.stdout
        self._stderr = sys.stderr
        sys.stdout = self._StdoutProxy(self)
        sys.stderr = self._StderrProxy(self)

    def stop(self):
        """Stop capturing output and restore the original stdout/stderr."""
        if self._stdout is None:
            return  # not capturing

        from agentops.helpers import get_ISO_time

        self.end_time = get_ISO_time()
        self.is_capturing = False

        # Restore original streams.
        sys.stdout = self._stdout
        sys.stderr = self._stderr
        self._stdout = None
        self._stderr = None

        # Clean up handlers.
        if self._handler:
            self._stdout_logger.removeHandler(self._handler)
            self._stderr_logger.removeHandler(self._handler)

            # Only close/shutdown resources we created ourselves; a shared
            # handler belongs to the telemetry setup and stays registered.
            if self._owns_handler:
                self._handler.close()
                if self._logger_provider:
                    self._logger_provider.shutdown()
                set_log_handler(None)

            self._handler = None
            self._logger_provider = None

    def flush(self):
        """Flush any buffered log records."""
        if self._handler:
            self._handler.flush()

    def get_summary(self) -> Dict[str, Any]:
        """Get a summary of the log capture statistics.

        Returns:
            Dict containing log capture metrics and metadata.
        """
        return {
            "stdout_lines": self.stdout_line_count,
            "stderr_lines": self.stderr_line_count,
            "log_levels": self.log_level_counts,
            "start_time": self.start_time,
            "end_time": self.end_time,
            "duration": self._calculate_duration() if self.start_time and self.end_time else None,
            "is_capturing": self.is_capturing,
        }

    def _calculate_duration(self) -> float:
        """Duration of the capture window in seconds.

        Assumes start_time/end_time are ISO-8601 strings (UTC "Z" suffix
        normalized to "+00:00" for datetime.fromisoformat).
        """
        from datetime import datetime

        start = datetime.fromisoformat(self.start_time.replace("Z", "+00:00"))
        end = datetime.fromisoformat(self.end_time.replace("Z", "+00:00"))
        return (end - start).total_seconds()

    def to_span_data(self) -> Dict[str, Any]:
        """Convert log capture data into span attributes.

        Returns:
            Dict of attributes suitable for OpenTelemetry spans/events.
        """
        data = {
            "session.id": str(self.session_id),
            "log.stdout_count": self.stdout_line_count,
            "log.stderr_count": self.stderr_line_count,
            "log.start_time": self.start_time,
            "log.end_time": self.end_time,
            "log.is_capturing": self.is_capturing,
        }

        # Add log level counts with proper prefix.
        for level, count in self.log_level_counts.items():
            data[f"log.level.{level.lower()}"] = count

        # Add duration if available.
        if self.start_time and self.end_time:
            data["log.duration_seconds"] = self._calculate_duration()

        return data

    class _StdoutProxy:
        """File-like object that forwards stdout writes to a logger."""

        def __init__(self, capture):
            self._capture = capture
            self._logger = capture._stdout_logger

        def write(self, text):
            if text.strip():  # Only log non-empty strings
                self._capture.stdout_line_count += 1
                self._capture.log_level_counts["INFO"] += 1
                self._logger.info(text.rstrip())

        def flush(self):
            pass

    class _StderrProxy:
        """File-like object that forwards stderr writes to a logger."""

        def __init__(self, capture):
            self._capture = capture
            self._logger = capture._stderr_logger

        def write(self, text):
            if text.strip():  # Only log non-empty strings
                self._capture.stderr_line_count += 1
                self._capture.log_level_counts["ERROR"] += 1
                self._logger.error(text.rstrip())

        def flush(self):
            pass


if __name__ == "__main__":
    # Manual smoke test: demonstrates capture of interleaved stdout/stderr
    # output from the main thread and a background thread.
    import os
    import sys
    import time
    from dataclasses import dataclass
    from uuid import uuid4

    # Add parent directory to path for imports.
    sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))

    from agentops.session import add_session  # Changed from relative import

    # Create a mock session so LogCapture.session can resolve it.
    @dataclass
    class MockSession:
        session_id: UUID

    session = MockSession(session_id=uuid4())
    add_session(session)  # Add session to registry so it can be found

    # Create and start capture.
    capture = LogCapture(session_id=session.session_id)
    capture.start()

    try:
        print("Regular stdout message")
        print("Multi-line stdout message\nwith a second line")
        sys.stderr.write("Error message to stderr\n")

        # Show that empty lines are ignored.
        print("")
        print("\n\n")

        # Demonstrate concurrent output.
        def background_prints():
            for i in range(3):
                time.sleep(0.5)
                print(f"Background message {i}")
                sys.stderr.write(f"Background error {i}\n")

        import threading

        thread = threading.Thread(target=background_prints)
        thread.start()

        # Main thread output.
        for i in range(3):
            time.sleep(0.7)
            print(f"Main thread message {i}")

        thread.join()

    finally:
        # Stop capture and show normal output is restored.
        capture.stop()
        print("\nCapture stopped - this prints normally to stdout")
        sys.stderr.write("This error goes normally to stderr\n")
# Global registry of sessions that are currently running.
active_sessions: List["Session"] = []


def add_session(session: "Session") -> None:
    """Register *session* in the active-sessions registry (no-op if present)."""
    if session in active_sessions:
        return
    active_sessions.append(session)


def remove_session(session: "Session") -> None:
    """Drop *session* from the active-sessions registry (no-op if absent)."""
    try:
        active_sessions.remove(session)
    except ValueError:
        pass


def get_active_sessions() -> List["Session"]:
    """Return the list of currently active sessions."""
    return active_sessions
platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", ] @@ -23,7 +25,7 @@ constraints = [ [[package]] name = "agentops" -version = "0.3.23" +version = "0.3.26" source = { editable = "." } dependencies = [ { name = "opentelemetry-api", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, @@ -32,6 +34,7 @@ dependencies = [ { name = "opentelemetry-exporter-otlp-proto-http", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "opentelemetry-sdk", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "opentelemetry-sdk", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "packaging" }, { name = "psutil" }, { name = "pyyaml" }, { name = "requests" }, @@ -39,9 +42,6 @@ dependencies = [ ] [package.dev-dependencies] -ci = [ - { name = "tach" }, -] dev = [ { name = "mypy" }, { name = "pdbpp" }, @@ -65,6 +65,7 @@ test = [ { name = "autogen" }, { name = "cohere" }, { name = "fastapi", extra = ["standard"] }, + { name = "google-generativeai" }, { name = "groq" }, { name = "litellm" }, { name = "mistralai" }, @@ -81,6 +82,7 @@ requires-dist = [ { name = 
"opentelemetry-exporter-otlp-proto-http", marker = "python_full_version >= '3.10'", specifier = ">=1.27.0" }, { name = "opentelemetry-sdk", marker = "python_full_version < '3.10'", specifier = "==1.22.0" }, { name = "opentelemetry-sdk", marker = "python_full_version >= '3.10'", specifier = ">=1.27.0" }, + { name = "packaging", specifier = ">=21.0,<25.0" }, { name = "psutil", specifier = ">=5.9.8,<6.1.0" }, { name = "pyyaml", specifier = ">=5.3,<7.0" }, { name = "requests", specifier = ">=2.0.0,<3.0.0" }, @@ -88,7 +90,6 @@ requires-dist = [ ] [package.metadata.requires-dev] -ci = [{ name = "tach", specifier = "~=0.9" }] dev = [ { name = "mypy" }, { name = "pdbpp", specifier = ">=0.10.3" }, @@ -111,6 +112,7 @@ test = [ { name = "autogen", specifier = "<0.4.0" }, { name = "cohere" }, { name = "fastapi", extras = ["standard"] }, + { name = "google-generativeai", specifier = ">=0.1.0" }, { name = "groq" }, { name = "litellm" }, { name = "mistralai" }, @@ -353,6 +355,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148 }, ] +[[package]] +name = "cachetools" +version = "5.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/74/57df1ab0ce6bc5f6fa868e08de20df8ac58f9c44330c7671ad922d2bbeae/cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95", size = 28044 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/4e/de4ff18bcf55857ba18d3a4bd48c8a9fde6bb0980c9d20b263f05387fd88/cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb", size = 9530 }, +] + [[package]] name = "certifi" version = "2024.12.14" @@ -856,27 +867,105 @@ wheels = [ ] [[package]] -name = "gitdb" -version = "4.0.12" 
+name = "google-ai-generativelanguage" +version = "0.6.15" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "proto-plus" }, + { name = "protobuf", version = "4.25.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "protobuf", version = "5.29.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/11/d1/48fe5d7a43d278e9f6b5ada810b0a3530bbeac7ed7fcbcd366f932f05316/google_ai_generativelanguage-0.6.15.tar.gz", hash = "sha256:8f6d9dc4c12b065fe2d0289026171acea5183ebf2d0b11cefe12f3821e159ec3", size = 1375443 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/a3/67b8a6ff5001a1d8864922f2d6488dc2a14367ceb651bc3f09a947f2f306/google_ai_generativelanguage-0.6.15-py3-none-any.whl", hash = "sha256:5a03ef86377aa184ffef3662ca28f19eeee158733e45d7947982eb953c6ebb6c", size = 1327356 }, +] + +[[package]] +name = "google-api-core" +version = "2.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf", version = "4.25.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "protobuf", version = "5.29.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/81/56/d70d66ed1b5ab5f6c27bf80ec889585ad8f865ff32acbafd3b2ef0bfb5d0/google_api_core-2.24.0.tar.gz", hash = "sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf", size = 162647 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a1/76/65b8b94e74bf1b6d1cc38d916089670c4da5029d25762441d8c5c19e51dd/google_api_core-2.24.0-py3-none-any.whl", hash = "sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9", size = 158576 }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, + { name = "grpcio-status", version = "1.62.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "grpcio-status", version = "1.70.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] + +[[package]] +name = "google-api-python-client" +version = "2.159.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-auth-httplib2" }, + { name = "httplib2" }, + { name = "uritemplate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/9f/12b58cca5a93d63fd6a7abed570423bdf2db4349eb9361ac5214d42ed7d6/google_api_python_client-2.159.0.tar.gz", hash = "sha256:55197f430f25c907394b44fa078545ffef89d33fd4dca501b7db9f0d8e224bd6", size = 12302576 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/ab/d0671375afe79e6e8c51736e115a69bb6b4bcdc80cd5c01bf667486cd24c/google_api_python_client-2.159.0-py2.py3-none-any.whl", hash = "sha256:baef0bb631a60a0bd7c0bf12a5499e3a40cd4388484de7ee55c1950bf820a0cf", size = 12814228 }, +] + +[[package]] +name = "google-auth" +version = "2.38.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/eb/d504ba1daf190af6b204a9d4714d457462b486043744901a6eeea711f913/google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4", size = 270866 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/9d/47/603554949a37bca5b7f894d51896a9c534b9eab808e2520a748e081669d0/google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a", size = 210770 }, +] + +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "smmap" }, + { name = "google-auth" }, + { name = "httplib2" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684 } +sdist = { url = "https://files.pythonhosted.org/packages/56/be/217a598a818567b28e859ff087f347475c807a5649296fb5a817c58dacef/google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05", size = 10842 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794 }, + { url = "https://files.pythonhosted.org/packages/be/8a/fe34d2f3f9470a27b01c9e76226965863f153d5fbe276f83608562e49c04/google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d", size = 9253 }, ] [[package]] -name = "gitpython" -version = "3.1.44" +name = "google-generativeai" +version = "0.8.4" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "gitdb" }, + { name = "google-ai-generativelanguage" }, + { name = "google-api-core" }, + { name = "google-api-python-client" }, + { name = "google-auth" }, + { name = "protobuf", version = "4.25.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "protobuf", version = 
"5.29.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pydantic" }, + { name = "tqdm" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/89/37df0b71473153574a5cdef8f242de422a0f5d26d7a9e231e6f169b4ad14/gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269", size = 214196 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/9a/4114a9057db2f1462d5c8f8390ab7383925fe1ac012eaa42402ad65c2963/GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110", size = 207599 }, + { url = "https://files.pythonhosted.org/packages/9b/b0/6c6af327a8a6ef3be6fe79be1d6f1e2914d6c363aa6b081b93396f4460a7/google_generativeai-0.8.4-py3-none-any.whl", hash = "sha256:e987b33ea6decde1e69191ddcaec6ef974458864d243de7191db50c21a7c5b82", size = 175409 }, ] [[package]] @@ -909,6 +998,99 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/98/e7/662ca14bfe05faf40375969fbb1113bba97fe3ff22d38f44eedeeff2c0b0/groq-0.15.0-py3-none-any.whl", hash = "sha256:c200558b67fee4b4f2bb89cc166337e3419a68c23280065770f8f8b0729c79ef", size = 109563 }, ] +[[package]] +name = "grpcio" +version = "1.70.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/e1/4b21b5017c33f3600dcc32b802bb48fe44a4d36d6c066f52650c7c2690fa/grpcio-1.70.0.tar.gz", hash = "sha256:8d1584a68d5922330025881e63a6c1b54cc8117291d382e4fa69339b6d914c56", size = 12788932 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/e9/f72408bac1f7b05b25e4df569b02d6b200c8e7857193aa9f1df7a3744add/grpcio-1.70.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:95469d1977429f45fe7df441f586521361e235982a0b39e33841549143ae2851", size = 5229736 }, + { url = 
"https://files.pythonhosted.org/packages/b3/17/e65139ea76dac7bcd8a3f17cbd37e3d1a070c44db3098d0be5e14c5bd6a1/grpcio-1.70.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:ed9718f17fbdb472e33b869c77a16d0b55e166b100ec57b016dc7de9c8d236bf", size = 11432751 }, + { url = "https://files.pythonhosted.org/packages/a0/12/42de6082b4ab14a59d30b2fc7786882fdaa75813a4a4f3d4a8c4acd6ed59/grpcio-1.70.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:374d014f29f9dfdb40510b041792e0e2828a1389281eb590df066e1cc2b404e5", size = 5711439 }, + { url = "https://files.pythonhosted.org/packages/34/f8/b5a19524d273cbd119274a387bb72d6fbb74578e13927a473bc34369f079/grpcio-1.70.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2af68a6f5c8f78d56c145161544ad0febbd7479524a59c16b3e25053f39c87f", size = 6330777 }, + { url = "https://files.pythonhosted.org/packages/1a/67/3d6c0ad786238aac7fa93b79246fc452978fbfe9e5f86f70da8e8a2797d0/grpcio-1.70.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce7df14b2dcd1102a2ec32f621cc9fab6695effef516efbc6b063ad749867295", size = 5944639 }, + { url = "https://files.pythonhosted.org/packages/76/0d/d9f7cbc41c2743cf18236a29b6a582f41bd65572a7144d92b80bc1e68479/grpcio-1.70.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c78b339869f4dbf89881e0b6fbf376313e4f845a42840a7bdf42ee6caed4b11f", size = 6643543 }, + { url = "https://files.pythonhosted.org/packages/fc/24/bdd7e606b3400c14330e33a4698fa3a49e38a28c9e0a831441adbd3380d2/grpcio-1.70.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58ad9ba575b39edef71f4798fdb5c7b6d02ad36d47949cd381d4392a5c9cbcd3", size = 6199897 }, + { url = "https://files.pythonhosted.org/packages/d1/33/8132eb370087960c82d01b89faeb28f3e58f5619ffe19889f57c58a19c18/grpcio-1.70.0-cp310-cp310-win32.whl", hash = "sha256:2b0d02e4b25a5c1f9b6c7745d4fa06efc9fd6a611af0fb38d3ba956786b95199", size = 3617513 }, + { url = 
"https://files.pythonhosted.org/packages/99/bc/0fce5cfc0ca969df66f5dca6cf8d2258abb88146bf9ab89d8cf48e970137/grpcio-1.70.0-cp310-cp310-win_amd64.whl", hash = "sha256:0de706c0a5bb9d841e353f6343a9defc9fc35ec61d6eb6111802f3aa9fef29e1", size = 4303342 }, + { url = "https://files.pythonhosted.org/packages/65/c4/1f67d23d6bcadd2fd61fb460e5969c52b3390b4a4e254b5e04a6d1009e5e/grpcio-1.70.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:17325b0be0c068f35770f944124e8839ea3185d6d54862800fc28cc2ffad205a", size = 5229017 }, + { url = "https://files.pythonhosted.org/packages/e4/bd/cc36811c582d663a740fb45edf9f99ddbd99a10b6ba38267dc925e1e193a/grpcio-1.70.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:dbe41ad140df911e796d4463168e33ef80a24f5d21ef4d1e310553fcd2c4a386", size = 11472027 }, + { url = "https://files.pythonhosted.org/packages/7e/32/8538bb2ace5cd72da7126d1c9804bf80b4fe3be70e53e2d55675c24961a8/grpcio-1.70.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5ea67c72101d687d44d9c56068328da39c9ccba634cabb336075fae2eab0d04b", size = 5707785 }, + { url = "https://files.pythonhosted.org/packages/ce/5c/a45f85f2a0dfe4a6429dee98717e0e8bd7bd3f604315493c39d9679ca065/grpcio-1.70.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb5277db254ab7586769e490b7b22f4ddab3876c490da0a1a9d7c695ccf0bf77", size = 6331599 }, + { url = "https://files.pythonhosted.org/packages/9f/e5/5316b239380b8b2ad30373eb5bb25d9fd36c0375e94a98a0a60ea357d254/grpcio-1.70.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7831a0fc1beeeb7759f737f5acd9fdcda520e955049512d68fda03d91186eea", size = 5940834 }, + { url = "https://files.pythonhosted.org/packages/05/33/dbf035bc6d167068b4a9f2929dfe0b03fb763f0f861ecb3bb1709a14cb65/grpcio-1.70.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:27cc75e22c5dba1fbaf5a66c778e36ca9b8ce850bf58a9db887754593080d839", size = 6641191 }, + { url = 
"https://files.pythonhosted.org/packages/4c/c4/684d877517e5bfd6232d79107e5a1151b835e9f99051faef51fed3359ec4/grpcio-1.70.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d63764963412e22f0491d0d32833d71087288f4e24cbcddbae82476bfa1d81fd", size = 6198744 }, + { url = "https://files.pythonhosted.org/packages/e9/43/92fe5eeaf340650a7020cfb037402c7b9209e7a0f3011ea1626402219034/grpcio-1.70.0-cp311-cp311-win32.whl", hash = "sha256:bb491125103c800ec209d84c9b51f1c60ea456038e4734688004f377cfacc113", size = 3617111 }, + { url = "https://files.pythonhosted.org/packages/55/15/b6cf2c9515c028aff9da6984761a3ab484a472b0dc6435fcd07ced42127d/grpcio-1.70.0-cp311-cp311-win_amd64.whl", hash = "sha256:d24035d49e026353eb042bf7b058fb831db3e06d52bee75c5f2f3ab453e71aca", size = 4304604 }, + { url = "https://files.pythonhosted.org/packages/4c/a4/ddbda79dd176211b518f0f3795af78b38727a31ad32bc149d6a7b910a731/grpcio-1.70.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:ef4c14508299b1406c32bdbb9fb7b47612ab979b04cf2b27686ea31882387cff", size = 5198135 }, + { url = "https://files.pythonhosted.org/packages/30/5c/60eb8a063ea4cb8d7670af8fac3f2033230fc4b75f62669d67c66ac4e4b0/grpcio-1.70.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:aa47688a65643afd8b166928a1da6247d3f46a2784d301e48ca1cc394d2ffb40", size = 11447529 }, + { url = "https://files.pythonhosted.org/packages/fb/b9/1bf8ab66729f13b44e8f42c9de56417d3ee6ab2929591cfee78dce749b57/grpcio-1.70.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:880bfb43b1bb8905701b926274eafce5c70a105bc6b99e25f62e98ad59cb278e", size = 5664484 }, + { url = "https://files.pythonhosted.org/packages/d1/06/2f377d6906289bee066d96e9bdb91e5e96d605d173df9bb9856095cccb57/grpcio-1.70.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e654c4b17d07eab259d392e12b149c3a134ec52b11ecdc6a515b39aceeec898", size = 6303739 }, + { url = 
"https://files.pythonhosted.org/packages/ae/50/64c94cfc4db8d9ed07da71427a936b5a2bd2b27c66269b42fbda82c7c7a4/grpcio-1.70.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2394e3381071045a706ee2eeb6e08962dd87e8999b90ac15c55f56fa5a8c9597", size = 5910417 }, + { url = "https://files.pythonhosted.org/packages/53/89/8795dfc3db4389c15554eb1765e14cba8b4c88cc80ff828d02f5572965af/grpcio-1.70.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b3c76701428d2df01964bc6479422f20e62fcbc0a37d82ebd58050b86926ef8c", size = 6626797 }, + { url = "https://files.pythonhosted.org/packages/9c/b2/6a97ac91042a2c59d18244c479ee3894e7fb6f8c3a90619bb5a7757fa30c/grpcio-1.70.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac073fe1c4cd856ebcf49e9ed6240f4f84d7a4e6ee95baa5d66ea05d3dd0df7f", size = 6190055 }, + { url = "https://files.pythonhosted.org/packages/86/2b/28db55c8c4d156053a8c6f4683e559cd0a6636f55a860f87afba1ac49a51/grpcio-1.70.0-cp312-cp312-win32.whl", hash = "sha256:cd24d2d9d380fbbee7a5ac86afe9787813f285e684b0271599f95a51bce33528", size = 3600214 }, + { url = "https://files.pythonhosted.org/packages/17/c3/a7a225645a965029ed432e5b5e9ed959a574e62100afab553eef58be0e37/grpcio-1.70.0-cp312-cp312-win_amd64.whl", hash = "sha256:0495c86a55a04a874c7627fd33e5beaee771917d92c0e6d9d797628ac40e7655", size = 4292538 }, + { url = "https://files.pythonhosted.org/packages/68/38/66d0f32f88feaf7d83f8559cd87d899c970f91b1b8a8819b58226de0a496/grpcio-1.70.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa573896aeb7d7ce10b1fa425ba263e8dddd83d71530d1322fd3a16f31257b4a", size = 5199218 }, + { url = "https://files.pythonhosted.org/packages/c1/96/947df763a0b18efb5cc6c2ae348e56d97ca520dc5300c01617b234410173/grpcio-1.70.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:d405b005018fd516c9ac529f4b4122342f60ec1cee181788249372524e6db429", size = 11445983 }, + { url = 
"https://files.pythonhosted.org/packages/fd/5b/f3d4b063e51b2454bedb828e41f3485800889a3609c49e60f2296cc8b8e5/grpcio-1.70.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f32090238b720eb585248654db8e3afc87b48d26ac423c8dde8334a232ff53c9", size = 5663954 }, + { url = "https://files.pythonhosted.org/packages/bd/0b/dab54365fcedf63e9f358c1431885478e77d6f190d65668936b12dd38057/grpcio-1.70.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfa089a734f24ee5f6880c83d043e4f46bf812fcea5181dcb3a572db1e79e01c", size = 6304323 }, + { url = "https://files.pythonhosted.org/packages/76/a8/8f965a7171ddd336ce32946e22954aa1bbc6f23f095e15dadaa70604ba20/grpcio-1.70.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f19375f0300b96c0117aca118d400e76fede6db6e91f3c34b7b035822e06c35f", size = 5910939 }, + { url = "https://files.pythonhosted.org/packages/1b/05/0bbf68be8b17d1ed6f178435a3c0c12e665a1e6054470a64ce3cb7896596/grpcio-1.70.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:7c73c42102e4a5ec76608d9b60227d917cea46dff4d11d372f64cbeb56d259d0", size = 6631405 }, + { url = "https://files.pythonhosted.org/packages/79/6a/5df64b6df405a1ed1482cb6c10044b06ec47fd28e87c2232dbcf435ecb33/grpcio-1.70.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:0a5c78d5198a1f0aa60006cd6eb1c912b4a1520b6a3968e677dbcba215fabb40", size = 6190982 }, + { url = "https://files.pythonhosted.org/packages/42/aa/aeaac87737e6d25d1048c53b8ec408c056d3ed0c922e7c5efad65384250c/grpcio-1.70.0-cp313-cp313-win32.whl", hash = "sha256:fe9dbd916df3b60e865258a8c72ac98f3ac9e2a9542dcb72b7a34d236242a5ce", size = 3598359 }, + { url = "https://files.pythonhosted.org/packages/1f/79/8edd2442d2de1431b4a3de84ef91c37002f12de0f9b577fb07b452989dbc/grpcio-1.70.0-cp313-cp313-win_amd64.whl", hash = "sha256:4119fed8abb7ff6c32e3d2255301e59c316c22d31ab812b3fbcbaf3d0d87cc68", size = 4293938 }, + { url = 
"https://files.pythonhosted.org/packages/9d/0e/64061c9746a2dd6e07cb0a0f3829f0a431344add77ec36397cc452541ff6/grpcio-1.70.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:4f1937f47c77392ccd555728f564a49128b6a197a05a5cd527b796d36f3387d0", size = 5231123 }, + { url = "https://files.pythonhosted.org/packages/72/9f/c93501d5f361aecee0146ab19300d5acb1c2747b00217c641f06fffbcd62/grpcio-1.70.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:0cd430b9215a15c10b0e7d78f51e8a39d6cf2ea819fd635a7214fae600b1da27", size = 11467217 }, + { url = "https://files.pythonhosted.org/packages/0a/1a/980d115b701023450a304881bf3f6309f6fb15787f9b78d2728074f3bf86/grpcio-1.70.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:e27585831aa6b57b9250abaf147003e126cd3a6c6ca0c531a01996f31709bed1", size = 5710913 }, + { url = "https://files.pythonhosted.org/packages/a0/84/af420067029808f9790e98143b3dd0f943bebba434a4706755051a520c91/grpcio-1.70.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1af8e15b0f0fe0eac75195992a63df17579553b0c4af9f8362cc7cc99ccddf4", size = 6330947 }, + { url = "https://files.pythonhosted.org/packages/24/1c/e1f06a7d29a1fa5053dcaf5352a50f8e1f04855fd194a65422a9d685d375/grpcio-1.70.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbce24409beaee911c574a3d75d12ffb8c3e3dd1b813321b1d7a96bbcac46bf4", size = 5943913 }, + { url = "https://files.pythonhosted.org/packages/41/8f/de13838e4467519a50cd0693e98b0b2bcc81d656013c38a1dd7dcb801526/grpcio-1.70.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ff4a8112a79464919bb21c18e956c54add43ec9a4850e3949da54f61c241a4a6", size = 6643236 }, + { url = "https://files.pythonhosted.org/packages/ac/73/d68c745d34e43a80440da4f3d79fa02c56cb118c2a26ba949f3cfd8316d7/grpcio-1.70.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5413549fdf0b14046c545e19cfc4eb1e37e9e1ebba0ca390a8d4e9963cab44d2", size = 6199038 }, + { url = 
"https://files.pythonhosted.org/packages/7e/dd/991f100b8c31636b4bb2a941dbbf54dbcc55d69c722cfa038c3d017eaa0c/grpcio-1.70.0-cp39-cp39-win32.whl", hash = "sha256:b745d2c41b27650095e81dea7091668c040457483c9bdb5d0d9de8f8eb25e59f", size = 3617512 }, + { url = "https://files.pythonhosted.org/packages/4d/80/1aa2ba791207a13e314067209b48e1a0893ed8d1f43ef012e194aaa6c2de/grpcio-1.70.0-cp39-cp39-win_amd64.whl", hash = "sha256:a31d7e3b529c94e930a117b2175b2efd179d96eb3c7a21ccb0289a8ab05b645c", size = 4303506 }, +] + +[[package]] +name = "grpcio-status" +version = "1.62.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", + "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", +] +dependencies = [ + { name = "googleapis-common-protos", marker = "python_full_version < '3.10'" }, + { name = "grpcio", marker = "python_full_version < '3.10'" }, + { name = "protobuf", version = "4.25.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7c/d7/013ef01c5a1c2fd0932c27c904934162f69f41ca0f28396d3ffe4d386123/grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485", size = 13063 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/40/972271de05f9315c0d69f9f7ebbcadd83bc85322f538637d11bb8c67803d/grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8", size = 14448 }, +] + +[[package]] +name = "grpcio-status" +version = "1.70.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and 
platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", +] +dependencies = [ + { name = "googleapis-common-protos", marker = "python_full_version >= '3.10'" }, + { name = "grpcio", marker = "python_full_version >= '3.10'" }, + { name = "protobuf", version = "5.29.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4c/d1/2397797c810020eac424e1aac10fbdc5edb6b9b4ad6617e0ed53ca907653/grpcio_status-1.70.0.tar.gz", hash = "sha256:0e7b42816512433b18b9d764285ff029bde059e9d41f8fe10a60631bd8348101", size = 13681 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/34/49e558040e069feebac70cdd1b605f38738c0277ac5d38e2ce3d03e1b1ec/grpcio_status-1.70.0-py3-none-any.whl", hash = "sha256:fc5a2ae2b9b1c1969cc49f3262676e6854aa2398ec69cb5bd6c47cd501904a85", size = 14429 }, +] + [[package]] name = "h11" version = "0.14.0" @@ -931,6 +1113,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, ] +[[package]] +name = "httplib2" +version = "0.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/ad/2371116b22d616c194aa25ec410c9c6c37f23599dcd590502b74db197584/httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81", size = 351116 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a8/6c/d2fbdaaa5959339d53ba38e94c123e4e84b8fbc4b84beb0e70d7c1608486/httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc", size = 96854 }, +] + [[package]] name = "httptools" version = "0.6.4" @@ -1049,8 +1243,10 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ { name = "zipp", marker = "python_full_version >= '3.10'" }, @@ -1477,8 +1673,10 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and 
platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368 } wheels = [ @@ -1584,8 +1782,10 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ { name = "deprecated", marker = "python_full_version >= '3.10'" }, @@ -1620,8 +1820,10 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation 
== 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ { name = "opentelemetry-proto", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, @@ -1661,8 +1863,10 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ { name = "deprecated", marker = "python_full_version >= '3.10'" }, @@ -1701,8 +1905,10 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", 
+ "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ { name = "protobuf", version = "5.29.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, @@ -1737,8 +1943,10 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, @@ -1770,8 +1978,10 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == 
'3.10.*' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ { name = "deprecated", marker = "python_full_version >= '3.10'" }, @@ -1823,18 +2033,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, ] -[[package]] -name = "prompt-toolkit" -version = "3.0.48" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wcwidth" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2d/4f/feb5e137aff82f7c7f3248267b97451da3644f6cdc218edfe549fb354127/prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90", size = 424684 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/6a/fd08d94654f7e67c52ca30523a178b3f8ccc4237fce4be90d39c938a831a/prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e", size = 386595 }, -] - [[package]] name = "propcache" version = "0.2.1" @@ -1924,6 +2122,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/b6/c5319caea262f4821995dca2107483b94a3345d4607ad797c76cb9c36bcc/propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54", size = 11818 }, ] +[[package]] +name = "proto-plus" +version = "1.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf", version = "4.25.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "protobuf", version = "5.29.3", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/79/a5c6cbb42268cfd3ddc652dc526889044a8798c688a03ff58e5e92b743c8/proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22", size = 56136 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/c3/59308ccc07b34980f9d532f7afc718a9f32b40e52cde7a740df8d55632fb/proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7", size = 50166 }, +] + [[package]] name = "protobuf" version = "4.25.5" @@ -1951,8 +2162,10 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] sdist = { url = "https://files.pythonhosted.org/packages/f7/d1/e0a911544ca9993e0f17ce6d3cc0932752356c1b0a834397f28e63479344/protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620", size = 424945 } wheels = [ @@ -1981,6 +2194,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7c/06/63872a64c312a24fb9b4af123ee7007a306617da63ff13bcc1432386ead7/psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0", size = 251988 }, ] +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1d/67/6afbf0d507f73c32d21084a79946bfcfca5fbc62a72057e9c23797a737c9/pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c", size = 310028 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/89/bc88a6711935ba795a679ea6ebee07e128050d6382eaa35a0a47c8032bdc/pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd", size = 181537 }, +] + [[package]] name = "pydantic" version = "2.10.5" @@ -2092,18 +2326,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a1/0c/c5c5cd3689c32ed1fe8c5d234b079c12c281c051759770c05b8bed6412b5/pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35", size = 2004961 }, ] -[[package]] -name = "pydot" -version = "3.0.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyparsing" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/66/dd/e0e6a4fb84c22050f6a9701ad9fd6a67ef82faa7ba97b97eb6fdc6b49b34/pydot-3.0.4.tar.gz", hash = "sha256:3ce88b2558f3808b0376f22bfa6c263909e1c3981e2a7b629b65b451eee4a25d", size = 168167 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/5f/1ebfd430df05c4f9e438dd3313c4456eab937d976f6ab8ce81a98f9fb381/pydot-3.0.4-py3-none-any.whl", hash = "sha256:bfa9c3fc0c44ba1d132adce131802d7df00429d1a79cc0346b0a5cd374dbe9c6", size = 35776 }, -] - [[package]] name = "pyfakefs" version = "5.7.4" @@ -2608,6 +2830,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/ad/03b5ccd1ab492c9dece85b3bf1c96453ab8c47983936fae6880f688f60b3/rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6", size = 233013 }, ] +[[package]] +name = "rsa" +version = "4.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/65/7d973b89c4d2351d7fb232c2e452547ddfa243e93131e7cfa766da627b52/rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21", size = 29711 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/97/fa78e3d2f65c02c8e1268b9aba606569fe97f6c8f7c2d74394553347c145/rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", size = 34315 }, +] + [[package]] name = "ruff" version = "0.9.1" @@ -2691,15 +2925,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, ] -[[package]] -name = "smmap" -version = "5.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303 }, -] - [[package]] name = "sniffio" version = "1.3.1" @@ -2722,45 +2947,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/96/00/2b325970b3060c7cecebab6d295afe763365822b1306a12eeab198f74323/starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7", size = 73225 }, ] -[[package]] -name = "stdlib-list" -version = "0.11.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5d/04/6b37a71e92ddca16b190b7df62494ac4779d58ced4787f73584eb32c8f03/stdlib_list-0.11.0.tar.gz", hash = "sha256:b74a7b643a77a12637e907f3f62f0ab9f67300bce4014f6b2d3c8b4c8fd63c66", size = 60335 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/16/fe/e07300c027a868d32d8ed7a425503401e91a03ff90e7ca525c115c634ffb/stdlib_list-0.11.0-py3-none-any.whl", hash = "sha256:8bf8decfffaaf273d4cfeb5bd852b910a00dec1037dcf163576803622bccf597", size = 83617 }, -] - -[[package]] -name = "tach" -version = "0.20.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "gitpython" }, - { name = "networkx", version = "3.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "prompt-toolkit" }, - { name = "pydot" }, - { name = "pyyaml" }, - { name = "rich" }, - { name = "stdlib-list", marker = 
"python_full_version < '3.10'" }, - { name = "tomli" }, - { name = "tomli-w" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/54/c8/4064f6e97abeda0dd5a68a23a9cc46f236850d8247f124847ae3f03f86ff/tach-0.20.0.tar.gz", hash = "sha256:65ec25354c36c1305a7abfae33f138e9b6026266a19507ff4724f3dda9b55c67", size = 738845 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/53/ce/39fe1253b2141f72d290d64d0b4b47ebed99b15849b0b1c42827054f3590/tach-0.20.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:28b2869a3ec2b9a8f558f472d35ad1d237024361bc3137fbc3e1f0e5f42b0bf5", size = 3070560 }, - { url = "https://files.pythonhosted.org/packages/05/ae/259dbb866ba38688e51a1da38d47c1da0878ea236e01486cddd7aed2b7cc/tach-0.20.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:c7bc8b325b41e2561cf9bace6a998fd391b45aeb37dd8011cfc311f4e6426f60", size = 2930725 }, - { url = "https://files.pythonhosted.org/packages/61/1b/c438601f76d3576200f4335c0d524377aebd20b18e09f07ef19e25fc338f/tach-0.20.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49804f15b5a03b7b39d476f1b46330442c637ab908c693fa6b26c57f707ca070", size = 3265779 }, - { url = "https://files.pythonhosted.org/packages/c0/36/56234b75760fa1ab02e83d16a7e75e0894266d8a9b4ea4e4d07a76b9be54/tach-0.20.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7051e2c5ccccd9d740bd7b33339117470aad7a0425fdd8c12a4f234a3f6d0896", size = 3233228 }, - { url = "https://files.pythonhosted.org/packages/92/77/01527cfa0f8c4c6cbf75f28d5a0316ceba44211ba9d949ca92068fdf77a7/tach-0.20.0-cp37-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69e4a810e0f35565e523545f191b85123c207487fe7ad6df63b2e3b514bfd0ad", size = 3523062 }, - { url = "https://files.pythonhosted.org/packages/26/8a/bd9fb362c9638811660a19eaa7283850ed675f79ee0e082e83c8563c738a/tach-0.20.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:511af3a651e3cf5329162b008295296d25f3ad9b0713bc4a93b78958874b2b4b", size = 3529428 }, - { url = "https://files.pythonhosted.org/packages/92/c2/7e01d870a79d65e0cceb621eac43c925f0bd96748c4da0039f5594e64f89/tach-0.20.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a80ba230299950493986dec04998a8ea231c9473c0d0b506cf67f139f640757", size = 3769550 }, - { url = "https://files.pythonhosted.org/packages/a1/38/1ac3e633ddf775e2c76d6daa8f345f02db2252b02b83970ca15fbe8504bd/tach-0.20.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aba656fd46e89a236d9b30610851010b200e7ae25db3053d1d852f6cc0357640", size = 3387869 }, - { url = "https://files.pythonhosted.org/packages/59/74/3ebe4994b0569a4b53b5963ad4b63ca91277a543c841cc4934132030f325/tach-0.20.0-cp37-abi3-win32.whl", hash = "sha256:653455ff1da0aebfdd7408905aae13747a7144ee98490d93778447f56330fa4b", size = 2608869 }, - { url = "https://files.pythonhosted.org/packages/7f/41/8d1d42e4de71e2894efe0e2ffd88e870252179df93335d0e7f04edd436b6/tach-0.20.0-cp37-abi3-win_amd64.whl", hash = "sha256:efdefa94bf899306fcb265ca603a419a24d2d81cc82d6547f4222077a37fa474", size = 2801132 }, -] - [[package]] name = "tenacity" version = "8.5.0" @@ -2885,15 +3071,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, ] -[[package]] -name = "tomli-w" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/19/75/241269d1da26b624c0d5e110e8149093c759b7a286138f4efd61a60e75fe/tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021", size = 7184 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c7/18/c86eb8e0202e32dd3df50d43d7ff9854f8e0603945ff398974c1d91ac1ef/tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90", size = 6675 }, -] - [[package]] name = "tqdm" version = "4.67.1" @@ -2927,7 +3104,8 @@ version = "2.31.0.6" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", ] @@ -2945,7 +3123,8 @@ version = "2.32.0.20241016" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ { name = "urllib3", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10' and platform_python_implementation != 'PyPy'" }, @@ -2986,13 +3165,23 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827 }, ] +[[package]] +name = "uritemplate" +version = "4.1.1" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/5a/4742fdba39cd02a56226815abfa72fe0aa81c33bed16ed045647d6000eba/uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0", size = 273898 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c0/7461b49cd25aeece13766f02ee576d1db528f1c37ce69aee300e075b485b/uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e", size = 10356 }, +] + [[package]] name = "urllib3" version = "1.26.20" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", ] @@ -3007,7 +3196,8 @@ version = "2.3.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.10' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } wheels = [ @@ 
-3170,15 +3360,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7a/e9/3cbcf4d70cd0b6d3f30631deae1bf37cc0be39887ca327a44462fe546bf5/watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e0227b8ed9074c6172cf55d85b5670199c99ab11fd27d2c473aa30aec67ee42", size = 452488 }, ] -[[package]] -name = "wcwidth" -version = "0.2.13" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 }, -] - [[package]] name = "websockets" version = "14.1" From 2682a1b1922bdc13fd7b4b00d4677d7e09763ea2 Mon Sep 17 00:00:00 2001 From: Teo Date: Mon, 27 Jan 2025 20:06:04 +0200 Subject: [PATCH 02/31] Handle colored log outputs Rich formatted text with various styles (color, bold, italic) Direct ANSI codes with proper newlines Mixed color text in both stdout and stderr All ANSI codes are preserved in the logged output Signed-off-by: Teo --- agentops/log_capture.py | 65 ++++++++++++++++++++--------------------- 1 file changed, 32 insertions(+), 33 deletions(-) diff --git a/agentops/log_capture.py b/agentops/log_capture.py index 81d16c6f2..1ae298d3a 100644 --- a/agentops/log_capture.py +++ b/agentops/log_capture.py @@ -217,15 +217,21 @@ class _StdoutProxy: def __init__(self, capture): self._capture = capture self._logger = capture._stdout_logger + self._original_stdout = capture._stdout def write(self, text): if text.strip(): # Only log non-empty strings self._capture.stdout_line_count += 1 self._capture.log_level_counts["INFO"] += 1 - 
self._logger.info(text.rstrip()) + # Get raw console output with ANSI codes + raw_text = text if isinstance(text, str) else str(text) + # Write to original stdout for display + self._original_stdout.write(raw_text) + # Log the raw text with ANSI codes preserved + self._logger.info(raw_text, extra={"raw": True, "preserve_color": True}) def flush(self): - pass + self._original_stdout.flush() class _StderrProxy: """Proxies stderr to logger""" @@ -233,15 +239,19 @@ class _StderrProxy: def __init__(self, capture): self._capture = capture self._logger = capture._stderr_logger + self._original_stderr = capture._stderr def write(self, text): if text.strip(): # Only log non-empty strings self._capture.stderr_line_count += 1 self._capture.log_level_counts["ERROR"] += 1 - self._logger.error(text.rstrip()) + # Write to original stderr for display + self._original_stderr.write(text) + # Log the raw text with ANSI codes preserved + self._logger.error(text, extra={"raw": True, "preserve_color": True}) def flush(self): - pass + self._original_stderr.flush() if __name__ == "__main__": @@ -270,37 +280,26 @@ class MockSession: # Create and start capture capture = LogCapture(session_id=session.session_id) capture.start() - try: - print("Regular stdout message") - print("Multi-line stdout message\nwith a second line") - sys.stderr.write("Error message to stderr\n") - - # Show that empty lines are ignored - print("") - print("\n\n") - - # Demonstrate concurrent output - def background_prints(): - for i in range(3): - time.sleep(0.5) - print(f"Background message {i}") - sys.stderr.write(f"Background error {i}\n") - - import threading - - thread = threading.Thread(target=background_prints) - thread.start() - - # Main thread output - for i in range(3): - time.sleep(0.7) - print(f"Main thread message {i}") - - thread.join() + # Test Rich formatting + from rich.console import Console + console = Console(force_terminal=True) + rprint = console.print + rprint("[red]This is red text[/red]") + 
rprint("[blue]Blue[/blue] and [green]green[/green] mixed") + rprint("[bold red]Bold red[/bold red] and [italic blue]italic blue[/italic blue]") + + # Test raw ANSI codes + print("\033[31mDirect red ANSI\033[0m\n") + print("\033[34mBlue\033[0m and \033[32mgreen\033[0m mixed ANSI\n") + print("\033[1;31mBold red ANSI\033[0m\n") + + # Test stderr with colors + sys.stderr.write("\033[35mMagenta error\033[0m\n") + sys.stderr.write("\033[33mYellow warning\033[0m\n") finally: # Stop capture and show normal output is restored capture.stop() - print("\nCapture stopped - this prints normally to stdout") - sys.stderr.write("This error goes normally to stderr\n") + # print("\nCapture stopped - this prints normally to stdout") + # sys.stderr.write("This error goes normally to stderr\n") From 8ed3258683c4f780d55dc8e0458df5144976569b Mon Sep 17 00:00:00 2001 From: Teo Date: Mon, 27 Jan 2025 20:10:51 +0200 Subject: [PATCH 03/31] raise exc if session not found Signed-off-by: Teo --- agentops/log_capture.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/agentops/log_capture.py b/agentops/log_capture.py index 1ae298d3a..a920d94e3 100644 --- a/agentops/log_capture.py +++ b/agentops/log_capture.py @@ -78,9 +78,12 @@ def session(self) -> Optional["Session"]: def start(self): """Start capturing output using OTEL logging handler""" - if self._stdout is not None or not self.session: + if self._stdout is not None: return + if not self.session: + raise ValueError(f"No active session found with ID {self.session_id}") + from agentops.helpers import get_ISO_time self.start_time = get_ISO_time() From 0395eb50b49f9bd1aafbb1eed16cf599e58c9cf4 Mon Sep 17 00:00:00 2001 From: Teo Date: Mon, 27 Jan 2025 20:10:54 +0200 Subject: [PATCH 04/31] tests: unit/test_log_capture.py Signed-off-by: Teo --- tests/unit/test_log_capture.py | 194 +++++++++++++++++++++++++++++++++ 1 file changed, 194 insertions(+) create mode 100644 tests/unit/test_log_capture.py diff --git 
a/tests/unit/test_log_capture.py b/tests/unit/test_log_capture.py new file mode 100644 index 000000000..83ff57ff8 --- /dev/null +++ b/tests/unit/test_log_capture.py @@ -0,0 +1,194 @@ +import logging +import sys +from io import StringIO +from unittest.mock import patch +from uuid import uuid4 + +import pytest +from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler +from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, ConsoleLogExporter +from opentelemetry.sdk.resources import Resource +from rich.console import Console + +from agentops.instrumentation import set_log_handler +from agentops.log_capture import LogCapture + + +@pytest.fixture +def session_id(): + return uuid4() + + +@pytest.fixture +def mock_session(session_id): + """Create a mock session""" + + class MockSession: + def __init__(self, session_id): + self.session_id = session_id + + return MockSession(session_id) + + +@pytest.fixture +def logger_provider(): + """Set up OpenTelemetry logging""" + resource = Resource.create( + { + "service.name": "test-service", + } + ) + provider = LoggerProvider(resource=resource) + exporter = ConsoleLogExporter() + provider.add_log_record_processor(BatchLogRecordProcessor(exporter)) + return provider + + +@pytest.fixture +def capture(session_id, logger_provider, mock_session): + """Set up LogCapture with OpenTelemetry logging""" + handler = LoggingHandler( + level=logging.INFO, + logger_provider=logger_provider, + ) + set_log_handler(handler) + + # Mock the session registry to return our mock session + with patch("agentops.session.get_active_sessions", return_value=[mock_session]): + capture = LogCapture(session_id=session_id) + yield capture + + set_log_handler(None) # Clean up + + +def test_basic_stdout_capture(capture): + """Test capturing basic stdout output""" + test_output = "Hello, world!" 
+ + capture.start() + try: + print(test_output) + finally: + capture.stop() + + assert capture.stdout_line_count == 1 + assert capture.stderr_line_count == 0 + assert capture.log_level_counts["INFO"] == 1 + assert not capture.is_capturing + + +def test_basic_stderr_capture(capture): + """Test capturing basic stderr output""" + test_error = "Error message" + + capture.start() + try: + sys.stderr.write(test_error + "\n") + finally: + capture.stop() + + assert capture.stdout_line_count == 0 + assert capture.stderr_line_count == 1 + assert capture.log_level_counts["ERROR"] == 1 + + +def test_rich_color_capture(capture): + """Test capturing Rich colored output""" + capture.start() + try: + console = Console(force_terminal=True) + console.print("[red]Colored[/red] text") + finally: + capture.stop() + + assert capture.stdout_line_count == 1 + assert capture.log_level_counts["INFO"] == 1 + + +def test_ansi_color_capture(capture): + """Test capturing raw ANSI colored output""" + capture.start() + try: + print("\033[31mRed\033[0m text") + sys.stderr.write("\033[34mBlue\033[0m error\n") + finally: + capture.stop() + + assert capture.stdout_line_count == 1 + assert capture.stderr_line_count == 1 + assert capture.log_level_counts["INFO"] == 1 + assert capture.log_level_counts["ERROR"] == 1 + + +def test_span_data_transformation(capture, session_id): + """Test converting log capture to span data""" + capture.start() + try: + print("Info message") + sys.stderr.write("Error message\n") + finally: + capture.stop() + + span_data = capture.to_span_data() + + # Check basic attributes + assert span_data["session.id"] == str(session_id) + assert span_data["log.stdout_count"] == 1 + assert span_data["log.stderr_count"] == 1 + assert span_data["log.is_capturing"] is False + + # Check log level counts + assert span_data["log.level.info"] == 1 + assert span_data["log.level.error"] == 1 + + # Check timing data + assert "log.start_time" in span_data + assert "log.end_time" in span_data + 
assert "log.duration_seconds" in span_data + assert span_data["log.duration_seconds"] > 0 + + +def test_empty_lines_ignored(capture): + """Test that empty lines are not counted""" + capture.start() + try: + print("") + print("\n") + print(" ") + sys.stderr.write("\n") + finally: + capture.stop() + + assert capture.stdout_line_count == 0 + assert capture.stderr_line_count == 0 + assert sum(capture.log_level_counts.values()) == 0 + + +def test_multiple_captures(capture): + """Test starting and stopping capture multiple times""" + # First capture + capture.start() + print("First") + capture.stop() + + assert capture.stdout_line_count == 1 + + # Second capture + capture.start() + print("Second") + sys.stderr.write("Error\n") + capture.stop() + + assert capture.stdout_line_count == 2 + assert capture.stderr_line_count == 1 + + +def test_session_not_found(): + """Test that starting capture without a session raises an error""" + session_id = uuid4() + + # Create LogCapture without mocking session registry + capture = LogCapture(session_id=session_id) + + with pytest.raises(ValueError, match=f"No active session found with ID {session_id}"): + capture.start() From 5afd0d42b865b152ca30d181d48619a1e9ca9eb2 Mon Sep 17 00:00:00 2001 From: Teo Date: Mon, 27 Jan 2025 20:35:37 +0200 Subject: [PATCH 05/31] log_handler and SessionLogExporter Signed-off-by: Teo --- agentops/log_capture.py | 72 ++++++++++++++ agentops/session.py | 201 +++++++++++++++++++++++++++++----------- 2 files changed, 219 insertions(+), 54 deletions(-) diff --git a/agentops/log_capture.py b/agentops/log_capture.py index a920d94e3..30e1452a0 100644 --- a/agentops/log_capture.py +++ b/agentops/log_capture.py @@ -257,6 +257,78 @@ def flush(self): self._original_stderr.flush() +class SessionLogHandler(LoggingHandler): + """A logging handler that captures logs for a specific session without altering output. 
+ + This handler captures logs and associates them with a specific session, while allowing + normal logging behavior to continue unaffected. + """ + + def __init__(self, session_id: UUID, logger_provider=None): + super().__init__(level=logging.INFO, logger_provider=logger_provider) + self.session_id = session_id + self.log_counts: Dict[str, int] = { + "INFO": 0, + "WARNING": 0, + "ERROR": 0, + "DEBUG": 0, + "CRITICAL": 0 + } + + def emit(self, record: logging.LogRecord) -> None: + """Emit a log record, capturing it for the session without altering normal output.""" + try: + # Count the log by level + self.log_counts[record.levelname] += 1 + + # Create the log event with ANSI codes preserved + msg = self.format(record) + + # Let the parent class handle sending to OTEL + super().emit(record) + + except Exception: + self.handleError(record) + + +def install_session_handler(session: "Session") -> Optional[SessionLogHandler]: + """Install a logging handler for a specific session. + + Args: + session: The session to install the handler for + + Returns: + The installed handler, or None if installation failed + """ + try: + # Create handler with session's logger provider + handler = SessionLogHandler( + session_id=session.session_id, + logger_provider=session._logger_provider + ) + + # Add handler to root logger to capture all logs + logging.getLogger().addHandler(handler) + + return handler + + except Exception as e: + logging.error(f"Failed to install session log handler: {e}") + return None + + +def remove_session_handler(handler: SessionLogHandler) -> None: + """Remove a session's logging handler. 
+ + Args: + handler: The handler to remove + """ + try: + logging.getLogger().removeHandler(handler) + except Exception as e: + logging.error(f"Failed to remove session log handler: {e}") + + if __name__ == "__main__": import os import sys diff --git a/agentops/session.py b/agentops/session.py index 0d67aa9d6..a9428fdb1 100644 --- a/agentops/session.py +++ b/agentops/session.py @@ -3,22 +3,25 @@ import asyncio import functools import json +import logging import threading from datetime import datetime, timezone from decimal import ROUND_HALF_UP, Decimal from enum import Enum -from typing import Any, Dict, List, Optional, Sequence, Union +from typing import Any, Dict, List, Optional, Sequence, Union, cast from uuid import UUID, uuid4 from opentelemetry import trace from opentelemetry.context import attach, detach, set_value +from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler, LogRecord +from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, LogExporter, LogExportResult from opentelemetry.sdk.resources import SERVICE_NAME, Resource from opentelemetry.sdk.trace import ReadableSpan, TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter, SpanExporter, SpanExportResult from termcolor import colored from .config import Configuration -from .event import ErrorEvent, Event +from .event import ErrorEvent, Event, EventType from .exceptions import ApiServerException from .helpers import filter_unjsonable, get_ISO_time, safe_serialize from .http_client import HttpClient, Response @@ -177,6 +180,45 @@ def shutdown(self) -> None: # Don't call session.end_session() here to avoid circular dependencies +class SessionLogExporter(LogExporter): + """ + Exports logs for a specific session to the AgentOps backend. 
+ """ + + def __init__(self, session): + self.session = session + self._shutdown = False + + def export(self, batch: Sequence[LogRecord]) -> LogExportResult: + """ + Export the log records to the AgentOps backend. + """ + if self._shutdown: + return LogExportResult.SUCCESS + + try: + # TODO: Implement actual export logic + # For now, just print to console + for record in batch: + print(f"[Session {self.session.session_id}] {record.body}") + return LogExportResult.SUCCESS + except Exception as e: + print(f"Failed to export logs: {e}") + return LogExportResult.FAILURE + + def force_flush(self, timeout_millis: Optional[int] = None) -> bool: + """ + Force flush any pending logs. + """ + return True + + def shutdown(self) -> None: + """ + Shuts down the exporter. + """ + self._shutdown = True + + class Session: """ Represents a session of events, with a start and end state. @@ -265,6 +307,19 @@ def __init__( self._tracer_provider.add_span_processor(self._span_processor) + # Initialize logging components + resource = Resource.create({SERVICE_NAME: f"agentops.session.{str(session_id)}"}) + self._logger_provider = LoggerProvider(resource=resource) + self._log_exporter = SessionLogExporter(session=self) + self._log_processor = BatchLogRecordProcessor(self._log_exporter) + + # Create and install session-specific logging handler + self._log_handler = LoggingHandler( + level=logging.INFO, + logger_provider=self._logger_provider, + ) + logger.addHandler(self._log_handler) + def set_video(self, video: str) -> None: """ Sets a url to the video recording of the session. @@ -336,7 +391,28 @@ def end_session( finally: del self._span_processor - # 5. Final session update + # 5. 
Clean up logging components + if hasattr(self, "_log_handler"): + try: + # Remove and close the log handler + logger.removeHandler(self._log_handler) + self._log_handler.close() + except Exception as e: + logger.warning(f"Error during log handler cleanup: {e}") + finally: + del self._log_handler + + if hasattr(self, "_log_processor"): + try: + # Force flush and shutdown the log processor + self._log_processor.force_flush(timeout_millis=5000) + self._log_processor.shutdown() + except Exception as e: + logger.warning(f"Error during log processor cleanup: {e}") + finally: + del self._log_processor + + # 6. Final session update if not (analytics_stats := self.get_analytics()): return None @@ -422,44 +498,63 @@ def record(self, event: Union[Event, ErrorEvent], flush_now=False): token = attach(token) # Create a copy of event data to modify - event_data = dict(filter_unjsonable(event.__dict__)) + event_data = {} + for key, value in event.__dict__.items(): + if value is not None: + event_data[key] = value # Add required fields based on event type if isinstance(event, ErrorEvent): event_data["error_type"] = getattr(event, "error_type", event.event_type) - elif event.event_type == "actions": + elif isinstance(event.event_type, EventType) and event.event_type == EventType.ACTION: # Ensure action events have action_type - if "action_type" not in event_data: - event_data["action_type"] = event_data.get("name", "unknown_action") - if "name" not in event_data: - event_data["name"] = event_data.get("action_type", "unknown_action") - elif event.event_type == "tools": + if "action_type" not in event_data and "name" in event_data: + event_data["action_type"] = event_data["name"] + elif "name" not in event_data and "action_type" in event_data: + event_data["name"] = event_data["action_type"] + else: + event_data.setdefault("action_type", "unknown_action") + event_data.setdefault("name", "unknown_action") + elif isinstance(event.event_type, EventType) and event.event_type == 
EventType.TOOL: # Ensure tool events have name - if "name" not in event_data: - event_data["name"] = event_data.get("tool_name", "unknown_tool") - if "tool_name" not in event_data: - event_data["tool_name"] = event_data.get("name", "unknown_tool") + if "name" not in event_data and "tool_name" in event_data: + event_data["name"] = event_data["tool_name"] + elif "tool_name" not in event_data and "name" in event_data: + event_data["tool_name"] = event_data["name"] + else: + event_data.setdefault("name", "unknown_tool") + event_data.setdefault("tool_name", "unknown_tool") + + # Convert event type to string for span name + event_type_str = ( + event.event_type.value if isinstance(event.event_type, EventType) else str(event.event_type) + ) with self._otel_tracer.start_as_current_span( - name=event.event_type, + name=event_type_str, attributes={ "event.id": str(event.id), - "event.type": event.event_type, + "event.type": event_type_str, "event.timestamp": event.init_timestamp or get_ISO_time(), "event.end_timestamp": event.end_timestamp or get_ISO_time(), "session.id": str(self.session_id), "session.tags": ",".join(self.tags) if self.tags else "", - "event.data": json.dumps(event_data), + "event.data": safe_serialize(event_data), }, ) as span: - if event.event_type in self.event_counts: - self.event_counts[event.event_type] += 1 + if event_type_str in self.event_counts: + self.event_counts[event_type_str] += 1 if isinstance(event, ErrorEvent): span.set_attribute("error", True) if hasattr(event, "trigger_event") and event.trigger_event: span.set_attribute("trigger_event.id", str(event.trigger_event.id)) - span.set_attribute("trigger_event.type", event.trigger_event.event_type) + trigger_event_type = ( + event.trigger_event.event_type.value + if isinstance(event.trigger_event.event_type, EventType) + else str(event.trigger_event.event_type) + ) + span.set_attribute("trigger_event.type", trigger_event_type) if flush_now and hasattr(self, "_span_processor"): 
self._span_processor.force_flush() @@ -492,55 +587,53 @@ def _send_event(self, event): def _reauthorize_jwt(self) -> Union[str, None]: with self._lock: payload = {"session_id": self.session_id} - serialized_payload = json.dumps(filter_unjsonable(payload)).encode("utf-8") - res = HttpClient.post( - f"{self.config.endpoint}/v2/reauthorize_jwt", - serialized_payload, - self.config.api_key, - ) - - logger.debug(res.body) - - if res.code != 200: + try: + serialized_payload = safe_serialize(payload).encode("utf-8") + res = HttpClient.post( + f"{self.config.endpoint}/v2/reauthorize_jwt", + serialized_payload, + self.config.api_key, + ) + if not res: + return None + jwt = res.body.get("jwt") + self.jwt = jwt + return jwt + except Exception as e: + logger.error(f"Failed to reauthorize JWT: {e}") return None - jwt = res.body.get("jwt", None) - self.jwt = jwt - return jwt - def _start_session(self): with self._lock: payload = {"session": self.__dict__} - serialized_payload = json.dumps(filter_unjsonable(payload)).encode("utf-8") - try: + serialized_payload = safe_serialize(payload).encode("utf-8") res = HttpClient.post( f"{self.config.endpoint}/v2/create_session", serialized_payload, api_key=self.config.api_key, parent_key=self.config.parent_key, ) - except ApiServerException as e: - return logger.error(f"Could not start session - {e}") + if not res: + return False + jwt = res.body.get("jwt") + self.jwt = jwt + if jwt is None: + return False - logger.debug(res.body) - - if res.code != 200: - return False - - jwt = res.body.get("jwt", None) - self.jwt = jwt - if jwt is None: - return False - - logger.info( - colored( - f"\x1b[34mSession Replay: {self.session_url}\x1b[0m", - "blue", + logger.info( + colored( + f"\x1b[34mSession Replay: {self.session_url}\x1b[0m", + "blue", + ) ) - ) - return True + return True + except ApiServerException as e: + return logger.error(f"Could not start session - {e}") + except Exception as e: + logger.error(f"Failed to start session: {e}") + 
return False def _update_session(self) -> None: """Update session state on the server""" From 0e2ff94f3b7148bbf8ce94a3389d66b9229ad65f Mon Sep 17 00:00:00 2001 From: Teo Date: Tue, 28 Jan 2025 01:00:28 +0200 Subject: [PATCH 06/31] test export Signed-off-by: Teo --- tests/integration/test_session_export.py | 115 +++++++++++++++++++++++ 1 file changed, 115 insertions(+) create mode 100644 tests/integration/test_session_export.py diff --git a/tests/integration/test_session_export.py b/tests/integration/test_session_export.py new file mode 100644 index 000000000..4564daaae --- /dev/null +++ b/tests/integration/test_session_export.py @@ -0,0 +1,115 @@ +import time +from uuid import uuid4 +from opentelemetry.sdk.trace import ReadableSpan, SpanExportResult +from agentops.session import Session, SessionExporter +from agentops.config import Configuration +from agentops.event import ActionEvent, LLMEvent + +class TestExporter(SessionExporter): + """Test exporter that tracks exports without making HTTP requests""" + def __init__(self, session: Session, **kwargs): + super().__init__(session, **kwargs) + self.export_count = 0 + self.exported_spans = [] + + def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: + """Override export to count calls and store spans without making HTTP requests""" + self.export_count += 1 + self.exported_spans.extend(spans) + return SpanExportResult.SUCCESS + +def test_session_export_behavior(): + """Test the export behavior of a session with multiple event types""" + # Setup + config = Configuration( + api_key="test_key", + max_queue_size=10, # Small queue to force frequent exports + max_wait_time=100 # Short delay (milliseconds) to speed up test + ) + session = Session(session_id=uuid4(), config=config) + + # Replace real exporter with test exporter + test_exporter = TestExporter(session=session) + session._otel_exporter = test_exporter + + # Record different types of events + # LLM events + session.record(LLMEvent( + prompt="What is 
2+2?", + completion="4", + model="gpt-3.5-turbo" + )) + + # Action events + session.record(ActionEvent( + action_type="calculate", + params={"x": 2, "y": 2}, + returns="4" + )) + + # Add some delay to allow for processing + time.sleep(0.2) + + # Force flush to ensure all spans are exported + session._span_processor.force_flush() + + # Verify exports occurred + assert test_exporter.export_count > 0, "Export should have been called at least once" + assert len(test_exporter.exported_spans) == 2, "Should have exported 2 spans" + + # Verify span contents + spans_by_name = {span.name: span for span in test_exporter.exported_spans} + + # Check LLM span + assert "llms" in spans_by_name, "Should have an LLM span" + llm_span = spans_by_name["llms"] + llm_data = llm_span.attributes.get("event.data") + assert "gpt-3.5-turbo" in llm_data, "LLM span should contain model information" + + # Check Action span + assert "actions" in spans_by_name, "Should have an Action span" + action_span = spans_by_name["actions"] + action_data = action_span.attributes.get("event.data") + assert "calculate" in action_data, "Action span should contain action type" + + # Clean up + session.end_session() + + print(f"Export was called {test_exporter.export_count} times") + print(f"Total spans exported: {len(test_exporter.exported_spans)}") + +def test_session_export_batching(): + """Test how the session batches events before exporting""" + config = Configuration( + api_key="test_key", + max_queue_size=5, # Small queue size + max_wait_time=500 # Longer delay to test batching + ) + session = Session(session_id=uuid4(), config=config) + + # Replace real exporter with test exporter + test_exporter = TestExporter(session=session) + session._otel_exporter = test_exporter + + # Record multiple events quickly + for i in range(10): + session.record(ActionEvent( + action_type=f"test_action_{i}", + params={"index": i} + )) + + # Add delay to allow for processing + time.sleep(1) + + # Force final flush + 
session._span_processor.force_flush() + + # Verify batching behavior + assert test_exporter.export_count >= 2, "Should have multiple export batches" + assert len(test_exporter.exported_spans) == 10, "Should have exported all 10 spans" + + # Clean up + session.end_session() + + print(f"Number of export batches: {test_exporter.export_count}") + print(f"Average batch size: {len(test_exporter.exported_spans) / test_exporter.export_count:.1f}") \ No newline at end of file From 710c02392a4fdddae0fb6352fc67e3d6aa227d5a Mon Sep 17 00:00:00 2001 From: Teo Date: Tue, 4 Feb 2025 01:40:44 +0200 Subject: [PATCH 07/31] SessionLogExporter: point to /v3/logs Signed-off-by: Teo --- agentops/session.py | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/agentops/session.py b/agentops/session.py index a9428fdb1..b1bcb4cf4 100644 --- a/agentops/session.py +++ b/agentops/session.py @@ -188,6 +188,7 @@ class SessionLogExporter(LogExporter): def __init__(self, session): self.session = session self._shutdown = False + self.endpoint = f"{session.config.endpoint}/v3/logs/{session.session_id}" def export(self, batch: Sequence[LogRecord]) -> LogExportResult: """ @@ -197,13 +198,26 @@ def export(self, batch: Sequence[LogRecord]) -> LogExportResult: return LogExportResult.SUCCESS try: - # TODO: Implement actual export logic - # For now, just print to console - for record in batch: - print(f"[Session {self.session.session_id}] {record.body}") - return LogExportResult.SUCCESS + # Format logs for API + log_data = { + "logs": [record.body for record in batch], + "start_time": self.session.init_timestamp, + "end_time": self.session.end_timestamp, + "is_capturing": not self._shutdown + } + + # Send logs to API + res = HttpClient.put( + self.endpoint, + json.dumps(log_data).encode("utf-8"), + api_key=self.session.config.api_key, + jwt=self.session.jwt + ) + + return LogExportResult.SUCCESS if res.code == 200 else LogExportResult.FAILURE + except Exception as 
e: - print(f"Failed to export logs: {e}") + logger.error(f"Failed to export logs: {e}") return LogExportResult.FAILURE def force_flush(self, timeout_millis: Optional[int] = None) -> bool: """ Force flush any pending logs. """ return True From b5abf03b3a160c8d9d4fad76b0df69a9a6b8c7b5 Mon Sep 17 00:00:00 2001 From: Teo Date: Tue, 4 Feb 2025 01:45:15 +0200 Subject: [PATCH 08/31] http_client: expose GET/POST/PUT/DELETE methods Added a private _make_request method that handles all the common request logic. Simplified the public HTTP methods (GET/POST/PUT/DELETE) to use the common handler. Each method only specifies its unique parameters and passes them to _make_request. Added DELETE method for completeness. Maintained all existing error handling and response processing. Kept the connection pooling and header preparation logic unchanged. Signed-off-by: Teo --- agentops/http_client.py | 87 +++++++++++++++++++++++------------------ 1 file changed, 48 insertions(+), 39 deletions(-) diff --git a/agentops/http_client.py b/agentops/http_client.py index 11c0bf49f..1f0fed5da 100644 --- a/agentops/http_client.py +++ b/agentops/http_client.py @@ -112,21 +112,30 @@ def _prepare_headers( return headers @classmethod - def post( + def _make_request( cls, + method: str, url: str, - payload: bytes, api_key: Optional[str] = None, parent_key: Optional[str] = None, jwt: Optional[str] = None, header: Optional[Dict[str, str]] = None, + payload: Optional[bytes] = None, ) -> Response: - """Make HTTP POST request using connection pooling""" + """Make HTTP request using connection pooling""" result = Response() try: headers = cls._prepare_headers(api_key, parent_key, jwt, header) session = cls.get_session() - res = session.post(url, data=payload, headers=headers, timeout=20) + + kwargs = { + "headers": headers, + "timeout": 20 + } + if payload is not None: + kwargs["data"] = payload + + res = getattr(session, method.lower())(url, **kwargs) result.parse(res) except requests.exceptions.Timeout: @@ -168,41 +177,41 @@ def get( jwt: Optional[str] = None, 
header: Optional[Dict[str, str]] = None, ) -> Response: - """Make HTTP GET request using connection pooling""" - result = Response() - try: - headers = cls._prepare_headers(api_key, None, jwt, header) - session = cls.get_session() - res = session.get(url, headers=headers, timeout=20) - result.parse(res) + """Make HTTP GET request""" + return cls._make_request("GET", url, api_key=api_key, jwt=jwt, header=header) - except requests.exceptions.Timeout: - result.code = 408 - result.status = HttpStatus.TIMEOUT - raise ApiServerException("Could not reach API server - connection timed out") - except requests.exceptions.HTTPError as e: - try: - result.parse(e.response) - except Exception: - result = Response() - result.code = e.response.status_code - result.status = Response.get_status(e.response.status_code) - result.body = {"error": str(e)} - raise ApiServerException(f"HTTPError: {e}") - except requests.exceptions.RequestException as e: - result.body = {"error": str(e)} - raise ApiServerException(f"RequestException: {e}") + @classmethod + def post( + cls, + url: str, + payload: bytes, + api_key: Optional[str] = None, + parent_key: Optional[str] = None, + jwt: Optional[str] = None, + header: Optional[Dict[str, str]] = None, + ) -> Response: + """Make HTTP POST request""" + return cls._make_request("POST", url, api_key=api_key, parent_key=parent_key, jwt=jwt, header=header, payload=payload) - if result.code == 401: - raise ApiServerException( - f"API server: invalid API key: {api_key}. 
Find your API key at https://app.agentops.ai/settings/projects" - ) - if result.code == 400: - if "message" in result.body: - raise ApiServerException(f"API server: {result.body['message']}") - else: - raise ApiServerException(f"API server: {result.body}") - if result.code == 500: - raise ApiServerException("API server: - internal server error") + @classmethod + def put( + cls, + url: str, + payload: bytes, + api_key: Optional[str] = None, + jwt: Optional[str] = None, + header: Optional[Dict[str, str]] = None, + ) -> Response: + """Make HTTP PUT request""" + return cls._make_request("PUT", url, api_key=api_key, jwt=jwt, header=header, payload=payload) - return result + @classmethod + def delete( + cls, + url: str, + api_key: Optional[str] = None, + jwt: Optional[str] = None, + header: Optional[Dict[str, str]] = None, + ) -> Response: + """Make HTTP DELETE request""" + return cls._make_request("DELETE", url, api_key=api_key, jwt=jwt, header=header) From 9993f808c7fd0aa74b1d70d578ea98b79085e5a1 Mon Sep 17 00:00:00 2001 From: Teo Date: Tue, 4 Feb 2025 15:18:06 +0200 Subject: [PATCH 09/31] tests: req_mock | mock `/v3/logs/` with regex pattern Signed-off-by: Teo --- tests/unit/conftest.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 30f70cc54..ed5c76daa 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -2,6 +2,7 @@ import uuid from collections import defaultdict from typing import Dict, Iterator, List +import re import pytest import requests_mock @@ -80,5 +81,8 @@ def reauthorize_jwt_response(request, context): m.post(base_url + "/v2/developer_errors", json={"status": "ok"}) m.post(base_url + "/v2/reauthorize_jwt", json=reauthorize_jwt_response) m.post(base_url + "/v2/create_agent", json={"status": "success"}) + # Use explicit regex pattern for logs endpoint to match any URL and session ID + logs_pattern = 
re.compile(r'.*/v3/logs/[0-9a-f-]{8}-[0-9a-f-]{4}-[0-9a-f-]{4}-[0-9a-f-]{4}-[0-9a-f-]{12}') + m.put(logs_pattern, json={"status": "success"}) yield m From ecc5037d1bd7657be1f9ddb7914ddae1f49b3ef7 Mon Sep 17 00:00:00 2001 From: Teo Date: Tue, 4 Feb 2025 15:18:30 +0200 Subject: [PATCH 10/31] tests(session): +TestSessionLogExporter Signed-off-by: Teo --- tests/unit/test_session.py | 129 +++++++++++++++++++++++++++++++++++++ 1 file changed, 129 insertions(+) diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index c8a1fc909..85dab660f 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -12,6 +12,8 @@ from opentelemetry.sdk.trace.export import SpanExportResult from opentelemetry.trace import SpanContext, SpanKind, Status, StatusCode from opentelemetry.trace.span import TraceState +from opentelemetry.sdk._logs import LogRecord, LoggerProvider, LoggingHandler +from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, LogExporter, LogExportResult import agentops from agentops import ActionEvent, Client @@ -619,3 +621,130 @@ def test_export_with_missing_id(self, mock_req): UUID(event["id"]) except ValueError: pytest.fail("Event ID is not a valid UUID") + + +class TestSessionLogExporter: + def setup_method(self): + """Set up test environment before each test""" + self.api_key = "11111111-1111-4111-8111-111111111111" + agentops.init(api_key=self.api_key, max_wait_time=50, auto_start_session=False) + self.session = agentops.start_session() + assert self.session is not None + self.log_exporter = self.session._log_exporter + + def teardown_method(self): + """Clean up after each test""" + if self.session: + self.session.end_session("Success") + agentops.end_all_sessions() + clear_singletons() + + def test_log_export_basic(self, mock_req): + """Test basic log export functionality""" + # Create a test log record + log_record = LogRecord( + timestamp=123456789, + trace_id=0x000000000000000000000000DEADBEEF, + 
span_id=0x00000000DEADBEF0, + trace_flags=0x01, + severity_text="INFO", + severity_number=9, + body="Test log message", + resource=self.session._logger_provider.resource, + attributes={}, + ) + + # Export the log record + result = self.log_exporter.export([log_record]) + + # Verify export was successful + assert result == LogExportResult.SUCCESS + + # Verify the request + assert len(mock_req.request_history) > 0 + last_request = mock_req.last_request.json() + assert "logs" in last_request + assert len(last_request["logs"]) == 1 + assert last_request["logs"][0] == "Test log message" + + def test_log_export_multiple_records(self, mock_req): + """Test exporting multiple log records at once""" + # Create test log records + log_records = [ + LogRecord( + timestamp=123456789, + trace_id=0x000000000000000000000000DEADBEEF, + span_id=0x00000000DEADBEF0, + trace_flags=0x01, + severity_text="INFO", + severity_number=9, + body=f"Test message {i}", + resource=self.session._logger_provider.resource, + attributes={}, + ) + for i in range(3) + ] + + # Export the log records + result = self.log_exporter.export(log_records) + + # Verify export was successful + assert result == LogExportResult.SUCCESS + + # Verify the request + assert len(mock_req.request_history) > 0 + last_request = mock_req.last_request.json() + assert "logs" in last_request + assert len(last_request["logs"]) == 3 + assert last_request["logs"] == ["Test message 0", "Test message 1", "Test message 2"] + + def test_log_export_after_shutdown(self, mock_req): + """Test that export after shutdown returns success without sending request""" + # Shutdown the exporter + self.log_exporter.shutdown() + + # Create a test log record + log_record = LogRecord( + timestamp=123456789, + trace_id=0x000000000000000000000000DEADBEEF, + span_id=0x00000000DEADBEF0, + trace_flags=0x01, + severity_text="INFO", + severity_number=9, + body="Test log message", + resource=self.session._logger_provider.resource, + attributes={}, + ) + + # 
Export should return success but not make request + result = self.log_exporter.export([log_record]) + assert result == LogExportResult.SUCCESS + + # Verify no request was made + assert not any(req.url.endswith("/v3/logs") for req in mock_req.request_history[-1:]) + + def test_log_export_with_session_metadata(self, mock_req): + """Test that exported logs include correct session metadata""" + # Create a test log record + log_record = LogRecord( + timestamp=123456789, + trace_id=0x000000000000000000000000DEADBEEF, + span_id=0x00000000DEADBEF0, + trace_flags=0x01, + severity_text="INFO", + severity_number=9, + body="Test log message", + resource=self.session._logger_provider.resource, + attributes={}, + ) + + # Export the log record + result = self.log_exporter.export([log_record]) + assert result == LogExportResult.SUCCESS + + # Verify the request includes session metadata + last_request = mock_req.last_request.json() + assert "start_time" in last_request + assert "end_time" in last_request + assert "is_capturing" in last_request + assert last_request["is_capturing"] == True From 2a53ce4c1662f8cf90f49b80c084672a048d8b45 Mon Sep 17 00:00:00 2001 From: Teo Date: Tue, 4 Feb 2025 15:20:15 +0200 Subject: [PATCH 11/31] tests(fixtures): +agentops_init, +agentops_session Signed-off-by: Teo --- tests/unit/conftest.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index ed5c76daa..bfeb796c1 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -86,3 +86,19 @@ def reauthorize_jwt_response(request, context): m.put(logs_pattern, json={"status": "success"}) yield m + + +@pytest.fixture +def agentops_init(): + agentops.init() + + +@pytest.fixture +def agentops_session(agentops_init): + session = agentops.start_session() + + assert session, "Failed agentops.start_session() returned None." 
+ + yield session + + agentops.end_all_sessions() From 8385e6eaa7b0fcaf9df3a0e4908cbc27ae0ffaf6 Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 5 Feb 2025 01:09:29 +0200 Subject: [PATCH 12/31] delete deprecated tests/integration/test_session_export.py Signed-off-by: Teo --- tests/integration/test_session_export.py | 115 ----------------------- 1 file changed, 115 deletions(-) delete mode 100644 tests/integration/test_session_export.py diff --git a/tests/integration/test_session_export.py b/tests/integration/test_session_export.py deleted file mode 100644 index 4564daaae..000000000 --- a/tests/integration/test_session_export.py +++ /dev/null @@ -1,115 +0,0 @@ -import time -from uuid import uuid4 -from opentelemetry.sdk.trace import ReadableSpan, SpanExportResult -from agentops.session import Session, SessionExporter -from agentops.config import Configuration -from agentops.event import ActionEvent, LLMEvent - -class TestExporter(SessionExporter): - """Test exporter that tracks exports without making HTTP requests""" - def __init__(self, session: Session, **kwargs): - super().__init__(session, **kwargs) - self.export_count = 0 - self.exported_spans = [] - - def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: - """Override export to count calls and store spans without making HTTP requests""" - self.export_count += 1 - self.exported_spans.extend(spans) - return SpanExportResult.SUCCESS - -def test_session_export_behavior(): - """Test the export behavior of a session with multiple event types""" - # Setup - config = Configuration( - api_key="test_key", - max_queue_size=10, # Small queue to force frequent exports - max_wait_time=100 # Short delay (milliseconds) to speed up test - ) - session = Session(session_id=uuid4(), config=config) - - # Replace real exporter with test exporter - test_exporter = TestExporter(session=session) - session._otel_exporter = test_exporter - - # Record different types of events - # LLM events - session.record(LLMEvent( - 
prompt="What is 2+2?", - completion="4", - model="gpt-3.5-turbo" - )) - - # Action events - session.record(ActionEvent( - action_type="calculate", - params={"x": 2, "y": 2}, - returns="4" - )) - - # Add some delay to allow for processing - time.sleep(0.2) - - # Force flush to ensure all spans are exported - session._span_processor.force_flush() - - # Verify exports occurred - assert test_exporter.export_count > 0, "Export should have been called at least once" - assert len(test_exporter.exported_spans) == 2, "Should have exported 2 spans" - - # Verify span contents - spans_by_name = {span.name: span for span in test_exporter.exported_spans} - - # Check LLM span - assert "llms" in spans_by_name, "Should have an LLM span" - llm_span = spans_by_name["llms"] - llm_data = llm_span.attributes.get("event.data") - assert "gpt-3.5-turbo" in llm_data, "LLM span should contain model information" - - # Check Action span - assert "actions" in spans_by_name, "Should have an Action span" - action_span = spans_by_name["actions"] - action_data = action_span.attributes.get("event.data") - assert "calculate" in action_data, "Action span should contain action type" - - # Clean up - session.end_session() - - print(f"Export was called {test_exporter.export_count} times") - print(f"Total spans exported: {len(test_exporter.exported_spans)}") - -def test_session_export_batching(): - """Test how the session batches events before exporting""" - config = Configuration( - api_key="test_key", - max_queue_size=5, # Small queue size - max_wait_time=500 # Longer delay to test batching - ) - session = Session(session_id=uuid4(), config=config) - - # Replace real exporter with test exporter - test_exporter = TestExporter(session=session) - session._otel_exporter = test_exporter - - # Record multiple events quickly - for i in range(10): - session.record(ActionEvent( - action_type=f"test_action_{i}", - params={"index": i} - )) - - # Add delay to allow for processing - time.sleep(1) - - # Force final 
flush - session._span_processor.force_flush() - - # Verify batching behavior - assert test_exporter.export_count >= 2, "Should have multiple export batches" - assert len(test_exporter.exported_spans) == 10, "Should have exported all 10 spans" - - # Clean up - session.end_session() - - print(f"Number of export batches: {test_exporter.export_count}") - print(f"Average batch size: {len(test_exporter.exported_spans) / test_exporter.export_count:.1f}") \ No newline at end of file From d69030c0cfa943f5b64c3c2a0bf8ae44a49d3f18 Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 5 Feb 2025 01:09:35 +0200 Subject: [PATCH 13/31] ruff Signed-off-by: Teo --- agentops/session.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/agentops/session.py b/agentops/session.py index b1bcb4cf4..7f29e4017 100644 --- a/agentops/session.py +++ b/agentops/session.py @@ -183,12 +183,22 @@ def shutdown(self) -> None: class SessionLogExporter(LogExporter): """ Exports logs for a specific session to the AgentOps backend. + + The flow is: + 1. A log message is created + 2. The LoggingHandler captures it + 3. The LoggingHandler sends it to the LoggerProvider + 4. The LoggerProvider passes it to the BatchLogRecordProcessor + 5. The BatchLogRecordProcessor buffers the log records + 6. 
When conditions are met (batch size/time/flush), the BatchLogRecordProcessor calls `export()` on the SessionLogExporter + """ - def __init__(self, session): + session: Session + + def __init__(self, session: Session): self.session = session self._shutdown = False - self.endpoint = f"{session.config.endpoint}/v3/logs/{session.session_id}" def export(self, batch: Sequence[LogRecord]) -> LogExportResult: """ @@ -203,15 +213,15 @@ def export(self, batch: Sequence[LogRecord]) -> LogExportResult: "logs": [record.body for record in batch], "start_time": self.session.init_timestamp, "end_time": self.session.end_timestamp, - "is_capturing": not self._shutdown + "is_capturing": not self._shutdown, } # Send logs to API res = HttpClient.put( - self.endpoint, + f"{self.session.config.endpoint}/v3/logs/{self.session.session_id}", json.dumps(log_data).encode("utf-8"), api_key=self.session.config.api_key, - jwt=self.session.jwt + jwt=self.session.jwt, ) return LogExportResult.SUCCESS if res.code == 200 else LogExportResult.FAILURE From b55c3f543cbf8e343d003c1bf090b690803c6766 Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 5 Feb 2025 19:41:25 +0200 Subject: [PATCH 14/31] Consolidate telemetry management under `instrumentation.py` separing it from `Session` Signed-off-by: Teo --- agentops/instrumentation.py | 93 +++++++++++++++++++++++++++---------- agentops/session.py | 33 +++---------- 2 files changed, 74 insertions(+), 52 deletions(-) diff --git a/agentops/instrumentation.py b/agentops/instrumentation.py index 3d7504b7d..fac5bd7a3 100644 --- a/agentops/instrumentation.py +++ b/agentops/instrumentation.py @@ -6,44 +6,87 @@ from uuid import UUID from opentelemetry import trace -from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler +from opentelemetry.sdk._logs.export import BatchLogRecordProcessor +from opentelemetry.sdk.resources import Resource, SERVICE_NAME from opentelemetry.sdk.trace import SpanProcessor, 
TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter from opentelemetry.sdk.trace.sampling import ParentBased, Sampler, TraceIdRatioBased if TYPE_CHECKING: - from opentelemetry.sdk._logs import LoggingHandler - from agentops.client import Client +""" +This module handles OpenTelemetry instrumentation setup for AgentOps sessions. -_log_handler = None - +Each AgentOps session requires its own telemetry setup to: +1. Track session-specific logs +2. Export logs to the AgentOps backend +3. Maintain isolation between different sessions running concurrently -def set_log_handler(log_handler: Optional[LoggingHandler]) -> None: - """Set the OTLP log handler. +The module provides functions to: +- Set up logging telemetry components for a new session +- Clean up telemetry components when a session ends +""" +def setup_session_telemetry(session_id: str, log_exporter) -> tuple[LoggingHandler, BatchLogRecordProcessor]: + """Set up OpenTelemetry logging components for a new session. + + This function creates the necessary components to capture and export logs for a specific session: + - A LoggerProvider with session-specific resource attributes + - A BatchLogRecordProcessor to batch and export logs + - A LoggingHandler to capture logs and forward them to the processor + Args: - log_handler: The logging handler to use for OTLP - """ - _log_handler = log_handler - - -def get_log_handler() -> Optional[LoggingHandler]: - """Get the current OTLP log handler. 
- + session_id: Unique identifier for the session, used to tag telemetry data + log_exporter: SessionLogExporter instance that handles sending logs to AgentOps backend + Returns: - The current logging handler if set, None otherwise + Tuple containing: + - LoggingHandler: Handler that should be added to the logger + - BatchLogRecordProcessor: Processor that batches and exports logs + + Used by: + Session class during initialization to set up logging for the new session """ - return _log_handler - - -def add_telemetry_log_handler(logger: logging.Logger) -> None: - """Add the OTLP log handler to the given logger if configured. + # Create logging components + resource = Resource.create({SERVICE_NAME: f"agentops.session.{session_id}"}) + logger_provider = LoggerProvider(resource=resource) + + # Create processor and handler + log_processor = BatchLogRecordProcessor(log_exporter) + log_handler = LoggingHandler( + level=logging.INFO, + logger_provider=logger_provider, + ) + + return log_handler, log_processor +def cleanup_session_telemetry(log_handler: LoggingHandler, log_processor: BatchLogRecordProcessor) -> None: + """Clean up OpenTelemetry logging components when a session ends. + + This function ensures proper cleanup by: + 1. Removing the handler from the logger + 2. Closing the handler to free resources + 3. Flushing any pending logs in the processor + 4. 
Shutting down the processor + Args: - logger: The logger to add the handler to + log_handler: The session's LoggingHandler to be removed and closed + log_processor: The session's BatchLogRecordProcessor to be flushed and shutdown + + Used by: + Session.end_session() to clean up logging components when the session ends """ - global _log_handler - if _log_handler: - logger.addHandler(_log_handler) + from agentops.log_config import logger + + try: + # Remove and close handler + logger.removeHandler(log_handler) + log_handler.close() + + # Shutdown processor + log_processor.force_flush(timeout_millis=5000) + log_processor.shutdown() + except Exception as e: + logger.warning(f"Error during logging cleanup: {e}") diff --git a/agentops/session.py b/agentops/session.py index 7f29e4017..1060c4c51 100644 --- a/agentops/session.py +++ b/agentops/session.py @@ -26,6 +26,7 @@ from .helpers import filter_unjsonable, get_ISO_time, safe_serialize from .http_client import HttpClient, Response from .log_config import logger +from .instrumentation import setup_session_telemetry, cleanup_session_telemetry """ OTEL Guidelines: @@ -332,15 +333,10 @@ def __init__( self._tracer_provider.add_span_processor(self._span_processor) # Initialize logging components - resource = Resource.create({SERVICE_NAME: f"agentops.session.{str(session_id)}"}) - self._logger_provider = LoggerProvider(resource=resource) self._log_exporter = SessionLogExporter(session=self) - self._log_processor = BatchLogRecordProcessor(self._log_exporter) - - # Create and install session-specific logging handler - self._log_handler = LoggingHandler( - level=logging.INFO, - logger_provider=self._logger_provider, + self._log_handler, self._log_processor = setup_session_telemetry( + str(session_id), + self._log_exporter ) logger.addHandler(self._log_handler) @@ -416,25 +412,8 @@ def end_session( del self._span_processor # 5. 
Clean up logging components - if hasattr(self, "_log_handler"): - try: - # Remove and close the log handler - logger.removeHandler(self._log_handler) - self._log_handler.close() - except Exception as e: - logger.warning(f"Error during log handler cleanup: {e}") - finally: - del self._log_handler - - if hasattr(self, "_log_processor"): - try: - # Force flush and shutdown the log processor - self._log_processor.force_flush(timeout_millis=5000) - self._log_processor.shutdown() - except Exception as e: - logger.warning(f"Error during log processor cleanup: {e}") - finally: - del self._log_processor + if hasattr(self, "_log_handler") and hasattr(self, "_log_processor"): + cleanup_session_telemetry(self._log_handler, self._log_processor) # 6. Final session update if not (analytics_stats := self.get_analytics()): From d8648869e3e4574801225a0a9a50e9cbf04a5187 Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 5 Feb 2025 19:52:15 +0200 Subject: [PATCH 15/31] draft Signed-off-by: Teo --- agentops/instrumentation.py | 40 ++++++- agentops/log_capture.py | 12 +-- tests/unit/test_instrumentation.py | 163 +++++++++++++++++++++++++++++ tests/unit/test_session.py | 100 ++++++++++++++++-- 4 files changed, 298 insertions(+), 17 deletions(-) create mode 100644 tests/unit/test_instrumentation.py diff --git a/agentops/instrumentation.py b/agentops/instrumentation.py index fac5bd7a3..43b3f2331 100644 --- a/agentops/instrumentation.py +++ b/agentops/instrumentation.py @@ -29,6 +29,32 @@ - Clean up telemetry components when a session ends """ +# Map of session_id to LoggingHandler +_session_handlers: Dict[UUID, LoggingHandler] = {} + +def get_session_handler(session_id: UUID) -> Optional[LoggingHandler]: + """Get the logging handler for a specific session. 
+ + Args: + session_id: The UUID of the session + + Returns: + The session's LoggingHandler if it exists, None otherwise + """ + return _session_handlers.get(session_id) + +def set_session_handler(session_id: UUID, handler: Optional[LoggingHandler]) -> None: + """Set or remove the logging handler for a session. + + Args: + session_id: The UUID of the session + handler: The handler to set, or None to remove + """ + if handler is None: + _session_handlers.pop(session_id, None) + else: + _session_handlers[session_id] = handler + def setup_session_telemetry(session_id: str, log_exporter) -> tuple[LoggingHandler, BatchLogRecordProcessor]: """Set up OpenTelemetry logging components for a new session. @@ -45,9 +71,6 @@ def setup_session_telemetry(session_id: str, log_exporter) -> tuple[LoggingHandl Tuple containing: - LoggingHandler: Handler that should be added to the logger - BatchLogRecordProcessor: Processor that batches and exports logs - - Used by: - Session class during initialization to set up logging for the new session """ # Create logging components resource = Resource.create({SERVICE_NAME: f"agentops.session.{session_id}"}) @@ -55,11 +78,16 @@ def setup_session_telemetry(session_id: str, log_exporter) -> tuple[LoggingHandl # Create processor and handler log_processor = BatchLogRecordProcessor(log_exporter) + logger_provider.add_log_record_processor(log_processor) # Add processor to provider + log_handler = LoggingHandler( level=logging.INFO, logger_provider=logger_provider, ) + # Register handler with session + set_session_handler(UUID(session_id), log_handler) + return log_handler, log_processor def cleanup_session_telemetry(log_handler: LoggingHandler, log_processor: BatchLogRecordProcessor) -> None: @@ -85,6 +113,12 @@ def cleanup_session_telemetry(log_handler: LoggingHandler, log_processor: BatchL logger.removeHandler(log_handler) log_handler.close() + # Remove from session handlers + for session_id, handler in list(_session_handlers.items()): + if handler 
is log_handler: + set_session_handler(session_id, None) + break + # Shutdown processor log_processor.force_flush(timeout_millis=5000) log_processor.shutdown() diff --git a/agentops/log_capture.py b/agentops/log_capture.py index 30e1452a0..be885734c 100644 --- a/agentops/log_capture.py +++ b/agentops/log_capture.py @@ -9,7 +9,7 @@ from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, ConsoleLogExporter from opentelemetry.sdk.resources import Resource -from agentops.instrumentation import get_log_handler, set_log_handler +from agentops.instrumentation import get_session_handler, set_session_handler if TYPE_CHECKING: from agentops.session import Session @@ -89,8 +89,8 @@ def start(self): self.start_time = get_ISO_time() self.is_capturing = True - # Try to get handler from telemetry manager - get_log_handler() + # Try to get handler from session + self._handler = get_session_handler(self.session_id) # Create our own handler if none exists if not self._handler: @@ -112,8 +112,8 @@ def start(self): logger_provider=self._logger_provider, ) - # Register with telemetry manager if available - set_log_handler(self._handler) + # Register with session + set_session_handler(self.session_id, self._handler) # Add handler to both loggers self._stdout_logger.addHandler(self._handler) @@ -155,7 +155,7 @@ def stop(self): self._logger_provider.shutdown() # Clear from telemetry manager if we created it - set_log_handler(None) + set_session_handler(self.session_id, None) self._handler = None self._logger_provider = None diff --git a/tests/unit/test_instrumentation.py b/tests/unit/test_instrumentation.py new file mode 100644 index 000000000..96bcc15ed --- /dev/null +++ b/tests/unit/test_instrumentation.py @@ -0,0 +1,163 @@ +import logging +from uuid import uuid4 +from unittest.mock import Mock + +import pytest +from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler +from opentelemetry.sdk._logs.export import BatchLogRecordProcessor + +from 
agentops.instrumentation import setup_session_telemetry, cleanup_session_telemetry +from agentops.log_config import logger +from agentops.session import SessionLogExporter +from agentops.log_capture import LogCapture +from agentops.session import add_session + + +class TestSessionTelemetry: + @pytest.fixture + def mock_session(self, base_url): + """Create a mock session with required attributes""" + session = Mock() + session.session_id = uuid4() + session.jwt = "test_jwt" + session.config = Mock() + session.config.endpoint = base_url + # Add session to registry so LogCapture can find it + add_session(session) + return session + + @pytest.fixture + def session_id(self): + return str(uuid4()) + + @pytest.fixture + def initial_handler_count(self): + """Get initial number of handlers on the logger""" + return len(logger.handlers) + + def test_setup_telemetry_components(self, session_id, mock_req, mock_session): + """Test that telemetry setup creates and returns the expected components""" + # Set up telemetry with real exporter + log_exporter = SessionLogExporter(session=mock_session) + log_handler, log_processor = setup_session_telemetry(session_id, log_exporter) + + # Verify components are created with correct types + assert isinstance(log_handler, LoggingHandler) + assert isinstance(log_processor, BatchLogRecordProcessor) + + # Verify handler has correct configuration + assert log_handler.level == logging.INFO + assert isinstance(log_handler._logger_provider, LoggerProvider) + + # Clean up + cleanup_session_telemetry(log_handler, log_processor) + + def test_handler_installation_and_cleanup(self, session_id, mock_req, mock_session, initial_handler_count): + """Test that handler is properly installed and removed""" + # Set up telemetry + log_exporter = SessionLogExporter(session=mock_session) + log_handler, log_processor = setup_session_telemetry(session_id, log_exporter) + logger.addHandler(log_handler) + + # Verify handler was added + assert len(logger.handlers) == 
initial_handler_count + 1 + assert log_handler in logger.handlers + + # Clean up + cleanup_session_telemetry(log_handler, log_processor) + + # Verify handler was removed + assert len(logger.handlers) == initial_handler_count + assert log_handler not in logger.handlers + + def test_logging_with_telemetry(self, mock_req, mock_session): + """Test that logs are captured and exported""" + # Create and start log capture + capture = LogCapture(session_id=mock_session.session_id) + capture.start() + + try: + session_id = str(mock_session.session_id) + print(f"\nSession ID: {session_id}") + + log_exporter = SessionLogExporter(session=mock_session) + log_handler, log_processor = setup_session_telemetry(session_id, log_exporter) + logger.addHandler(log_handler) + + # Log some messages + test_message = "Test log message" + print(f"Sending message: {test_message}") + logger.info(test_message) + + # Force flush logs + print("Forcing flush...") + log_processor.force_flush() + print("Flush complete") + + # Debug: Print all request URLs and mock setup + print("\nMock setup:") + print(f"Base URL: {mock_session.config.endpoint}") + print(f"Expected endpoint: {mock_session.config.endpoint}/v3/logs/{session_id}") + print("\nRequest history:") + for req in mock_req.request_history: + print(f"Method: {req.method}, URL: {req.url}") + if hasattr(req, 'text'): + print(f"Body: {req.text}") + + # Verify the request was made to the logs endpoint + assert any(req.url.endswith(f"/v3/logs/{session_id}") for req in mock_req.request_history), \ + f"No request found for /v3/logs/{session_id} in {[req.url for req in mock_req.request_history]}" + + finally: + # Clean up + capture.stop() + cleanup_session_telemetry(log_handler, log_processor) + + def test_cleanup_prevents_further_logging(self, session_id, mock_req, mock_session): + """Test that cleanup prevents further log exports""" + # Set up telemetry + log_exporter = SessionLogExporter(session=mock_session) + log_handler, log_processor = 
setup_session_telemetry(session_id, log_exporter) + logger.addHandler(log_handler) + + # Log before cleanup + logger.info("Before cleanup") + initial_request_count = len([r for r in mock_req.request_history if r.url.endswith(f"/v3/logs/{session_id}")]) + + # Clean up + cleanup_session_telemetry(log_handler, log_processor) + + # Try logging after cleanup + logger.info("After cleanup") + log_processor.force_flush() + + # Verify no new requests were made + final_request_count = len([r for r in mock_req.request_history if r.url.endswith(f"/v3/logs/{session_id}")]) + assert final_request_count == initial_request_count + + def test_multiple_sessions_isolation(self, mock_req, mock_session): + """Test that multiple sessions maintain logging isolation""" + # Set up two sessions + session_id_1 = str(uuid4()) + session_id_2 = str(uuid4()) + + log_exporter = SessionLogExporter(session=mock_session) + handler1, processor1 = setup_session_telemetry(session_id_1, log_exporter) + handler2, processor2 = setup_session_telemetry(session_id_2, log_exporter) + + logger.addHandler(handler1) + logger.addHandler(handler2) + + # Verify both handlers are present + assert handler1 in logger.handlers + assert handler2 in logger.handlers + + # Clean up one session + cleanup_session_telemetry(handler1, processor1) + + # Verify only the correct handler was removed + assert handler1 not in logger.handlers + assert handler2 in logger.handlers + + # Clean up the other session + cleanup_session_telemetry(handler2, processor2) \ No newline at end of file diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index 85dab660f..c13bfbfc5 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -8,12 +8,12 @@ import pytest import requests_mock from opentelemetry import trace +from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler, LogRecord +from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, LogExporter, LogExportResult from 
opentelemetry.sdk.trace import ReadableSpan from opentelemetry.sdk.trace.export import SpanExportResult from opentelemetry.trace import SpanContext, SpanKind, Status, StatusCode from opentelemetry.trace.span import TraceState -from opentelemetry.sdk._logs import LogRecord, LoggerProvider, LoggingHandler -from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, LogExporter, LogExportResult import agentops from agentops import ActionEvent, Client @@ -656,10 +656,10 @@ def test_log_export_basic(self, mock_req): # Export the log record result = self.log_exporter.export([log_record]) - + # Verify export was successful assert result == LogExportResult.SUCCESS - + # Verify the request assert len(mock_req.request_history) > 0 last_request = mock_req.last_request.json() @@ -687,10 +687,10 @@ def test_log_export_multiple_records(self, mock_req): # Export the log records result = self.log_exporter.export(log_records) - + # Verify export was successful assert result == LogExportResult.SUCCESS - + # Verify the request assert len(mock_req.request_history) > 0 last_request = mock_req.last_request.json() @@ -702,7 +702,7 @@ def test_log_export_after_shutdown(self, mock_req): """Test that export after shutdown returns success without sending request""" # Shutdown the exporter self.log_exporter.shutdown() - + # Create a test log record log_record = LogRecord( timestamp=123456789, @@ -719,7 +719,7 @@ def test_log_export_after_shutdown(self, mock_req): # Export should return success but not make request result = self.log_exporter.export([log_record]) assert result == LogExportResult.SUCCESS - + # Verify no request was made assert not any(req.url.endswith("/v3/logs") for req in mock_req.request_history[-1:]) @@ -748,3 +748,87 @@ def test_log_export_with_session_metadata(self, mock_req): assert "end_time" in last_request assert "is_capturing" in last_request assert last_request["is_capturing"] == True + + +class TestSessionLogging: + def setup_method(self): + """Set up test 
environment before each test""" + self.api_key = "11111111-1111-4111-8111-111111111111" + agentops.init(api_key=self.api_key, max_wait_time=50, auto_start_session=False) + self.session = agentops.start_session() + assert self.session is not None + + def teardown_method(self): + """Clean up after each test""" + if self.session: + self.session.end_session("Success") + agentops.end_all_sessions() + clear_singletons() + + @pytest.fixture + def logger(self): + from agentops.log_config import logger + + return logger + + def test_log_handler_installation(self, logger): + """Test that a log handler is correctly installed when session starts""" + # Check that the session has the required logging components + assert hasattr(self.session, "_logger_provider") + assert hasattr(self.session, "_log_handler") + assert hasattr(self.session, "_log_processor") + + # Verify the log handler is in the root logger's handlers + assert any(isinstance(handler, LoggingHandler) for handler in logger.handlers) + + # def test_log_handler_removal_on_session_end(self): + # """Test that the log handler is removed when session ends""" + # # Get initial handler count + # initial_handlers = len(logger.handlers) + + # # End the session + # self.session.end_session("Success") + + # # Verify handler was removed + # assert len(logger.handlers) == initial_handlers - 1 + # assert not any( + # isinstance(handler, LoggingHandler) + # for handler in logger.handlers + # ) + + # def test_logging_with_session(self, mock_req): + # """Test that logging works with an active session""" + # # Log a test message + # test_message = "Test log message" + # logger.info(test_message) + + # # Force flush logs + # self.session._log_processor.force_flush() + + # # Verify the request + # assert len(mock_req.request_history) > 0 + # last_request = mock_req.last_request.json() + # assert "logs" in last_request + # assert test_message in last_request["logs"] + + # def test_multiple_log_messages(self, mock_req): + # """Test 
handling of multiple log messages""" + # # Log multiple test messages + # test_messages = [ + # "First test message", + # "Second test message", + # "Third test message" + # ] + + # for msg in test_messages: + # logger.info(msg) + + # # Force flush logs + # self.session._log_processor.force_flush() + + # # Verify the request + # assert len(mock_req.request_history) > 0 + # last_request = mock_req.last_request.json() + # assert "logs" in last_request + # for msg in test_messages: + # assert msg in last_request["logs"] From 986b24afd261d8237eb469ee872ba59afbf0b997 Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 5 Feb 2025 19:57:53 +0200 Subject: [PATCH 16/31] TestSessionLogExporter to match new instrumenation Signed-off-by: Teo --- tests/unit/test_session.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index c13bfbfc5..a44e2af73 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -18,6 +18,8 @@ import agentops from agentops import ActionEvent, Client from agentops.http_client import HttpClient +from agentops.instrumentation import cleanup_session_telemetry, setup_session_telemetry +from agentops.session import SessionLogExporter from agentops.singleton import clear_singletons @@ -630,10 +632,15 @@ def setup_method(self): agentops.init(api_key=self.api_key, max_wait_time=50, auto_start_session=False) self.session = agentops.start_session() assert self.session is not None - self.log_exporter = self.session._log_exporter + + # Set up logging components through instrumentation + self.log_exporter = SessionLogExporter(session=self.session) + self.log_handler, self.log_processor = setup_session_telemetry(str(self.session.session_id), self.log_exporter) def teardown_method(self): """Clean up after each test""" + if hasattr(self, "log_handler") and hasattr(self, "log_processor"): + cleanup_session_telemetry(self.log_handler, self.log_processor) if 
self.session: self.session.end_session("Success") agentops.end_all_sessions() @@ -650,7 +657,7 @@ def test_log_export_basic(self, mock_req): severity_text="INFO", severity_number=9, body="Test log message", - resource=self.session._logger_provider.resource, + resource=self.log_handler._logger_provider.resource, attributes={}, ) @@ -679,7 +686,7 @@ def test_log_export_multiple_records(self, mock_req): severity_text="INFO", severity_number=9, body=f"Test message {i}", - resource=self.session._logger_provider.resource, + resource=self.log_handler._logger_provider.resource, attributes={}, ) for i in range(3) @@ -712,7 +719,7 @@ def test_log_export_after_shutdown(self, mock_req): severity_text="INFO", severity_number=9, body="Test log message", - resource=self.session._logger_provider.resource, + resource=self.log_handler._logger_provider.resource, attributes={}, ) @@ -734,7 +741,7 @@ def test_log_export_with_session_metadata(self, mock_req): severity_text="INFO", severity_number=9, body="Test log message", - resource=self.session._logger_provider.resource, + resource=self.log_handler._logger_provider.resource, attributes={}, ) @@ -774,10 +781,6 @@ def logger(self): def test_log_handler_installation(self, logger): """Test that a log handler is correctly installed when session starts""" # Check that the session has the required logging components - assert hasattr(self.session, "_logger_provider") - assert hasattr(self.session, "_log_handler") - assert hasattr(self.session, "_log_processor") - # Verify the log handler is in the root logger's handlers assert any(isinstance(handler, LoggingHandler) for handler in logger.handlers) From 5bdaf24e71ba50119896de6f56d99631fbe9d070 Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 5 Feb 2025 20:02:32 +0200 Subject: [PATCH 17/31] TestSessionLogging: ensures we're tracking the specific handler created for this session Signed-off-by: Teo logger -> agentops_logger Signed-off-by: Teo --- tests/unit/test_session.py | 50 
++++++++++++++++++++++---------------- 1 file changed, 29 insertions(+), 21 deletions(-) diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index a44e2af73..c4e590427 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -773,31 +773,39 @@ def teardown_method(self): clear_singletons() @pytest.fixture - def logger(self): + def agentops_logger(self): from agentops.log_config import logger return logger - def test_log_handler_installation(self, logger): - """Test that a log handler is correctly installed when session starts""" - # Check that the session has the required logging components - # Verify the log handler is in the root logger's handlers - assert any(isinstance(handler, LoggingHandler) for handler in logger.handlers) - - # def test_log_handler_removal_on_session_end(self): - # """Test that the log handler is removed when session ends""" - # # Get initial handler count - # initial_handlers = len(logger.handlers) - - # # End the session - # self.session.end_session("Success") - - # # Verify handler was removed - # assert len(logger.handlers) == initial_handlers - 1 - # assert not any( - # isinstance(handler, LoggingHandler) - # for handler in logger.handlers - # ) + def test_log_handler_installation(self, agentops_logger): + """Test that the session's specific log handler is correctly installed""" + # Get the handler that was created for this session + session_handler = self.session._log_handler + + # Verify the handler exists and is a LoggingHandler + assert isinstance(session_handler, LoggingHandler), "Session should have a LoggingHandler instance" + + # Verify this specific handler is in the logger's handlers + assert session_handler in agentops_logger.handlers, "Session's specific LoggingHandler should be in logger's handlers" + + # Count how many times this specific handler appears + handler_count = sum(1 for h in agentops_logger.handlers if h is session_handler) + assert handler_count == 1, "Session's 
LoggingHandler should appear exactly once in logger's handlers" + + def test_log_handler_removal_on_session_end(self, agentops_logger): + """Test that the session's specific log handler is removed when session ends""" + # Get the handler that was created for this session + this_session_logging_handler = self.session._log_handler + + # Verify handler exists before ending session + assert this_session_logging_handler in agentops_logger.handlers, "Session handler should be present before ending session" + + # End the session + self.session.end_session("Success") + + # Verify the specific handler was removed + assert this_session_logging_handler not in agentops_logger.handlers, "Session handler should be removed after ending session" # def test_logging_with_session(self, mock_req): # """Test that logging works with an active session""" From d487ff8987850b3e03f5f61ef628e6f75c9b52f6 Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 5 Feb 2025 20:07:43 +0200 Subject: [PATCH 18/31] test-log-cap Signed-off-by: Teo --- tests/unit/test_log_capture.py | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/tests/unit/test_log_capture.py b/tests/unit/test_log_capture.py index 83ff57ff8..dcbd0f62b 100644 --- a/tests/unit/test_log_capture.py +++ b/tests/unit/test_log_capture.py @@ -10,7 +10,7 @@ from opentelemetry.sdk.resources import Resource from rich.console import Console -from agentops.instrumentation import set_log_handler +from agentops.log_config import logger as agentops_logger from agentops.log_capture import LogCapture @@ -20,14 +20,23 @@ def session_id(): @pytest.fixture -def mock_session(session_id): - """Create a mock session""" - +def mock_session(session_id, logger_provider): + """Create a mock session with logging components""" class MockSession: - def __init__(self, session_id): + def __init__(self, session_id, logger_provider): self.session_id = session_id + self._log_handler = LoggingHandler( + level=logging.INFO, + 
logger_provider=logger_provider, + ) + # Add handler to logger + agentops_logger.addHandler(self._log_handler) + + def cleanup(self): + # Remove handler from logger + agentops_logger.removeHandler(self._log_handler) - return MockSession(session_id) + return MockSession(session_id, logger_provider) @pytest.fixture @@ -45,20 +54,15 @@ def logger_provider(): @pytest.fixture -def capture(session_id, logger_provider, mock_session): +def capture(session_id, mock_session): """Set up LogCapture with OpenTelemetry logging""" - handler = LoggingHandler( - level=logging.INFO, - logger_provider=logger_provider, - ) - set_log_handler(handler) - # Mock the session registry to return our mock session with patch("agentops.session.get_active_sessions", return_value=[mock_session]): capture = LogCapture(session_id=session_id) yield capture - set_log_handler(None) # Clean up + # Clean up + mock_session.cleanup() def test_basic_stdout_capture(capture): From 4cf1080201c19a43fa79103ad47a012d54583cdb Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 5 Feb 2025 21:37:58 +0200 Subject: [PATCH 19/31] fix(instrumentation): correct session_id type in handler setup --- agentops/instrumentation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/agentops/instrumentation.py b/agentops/instrumentation.py index 43b3f2331..a22e2a8e3 100644 --- a/agentops/instrumentation.py +++ b/agentops/instrumentation.py @@ -86,7 +86,7 @@ def setup_session_telemetry(session_id: str, log_exporter) -> tuple[LoggingHandl ) # Register handler with session - set_session_handler(UUID(session_id), log_handler) + set_session_handler(session_id, log_handler) return log_handler, log_processor From def0df7c62967a6b2848c15f8c41a9f0c0415435 Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 5 Feb 2025 22:54:21 +0200 Subject: [PATCH 20/31] test_instrumentation.py: use real agentops_session instead of mock session Signed-off-by: Teo --- tests/unit/test_instrumentation.py | 42 +++++++++++------------------- 1 file 
changed, 15 insertions(+), 27 deletions(-) diff --git a/tests/unit/test_instrumentation.py b/tests/unit/test_instrumentation.py index 96bcc15ed..e80ba1855 100644 --- a/tests/unit/test_instrumentation.py +++ b/tests/unit/test_instrumentation.py @@ -14,18 +14,6 @@ class TestSessionTelemetry: - @pytest.fixture - def mock_session(self, base_url): - """Create a mock session with required attributes""" - session = Mock() - session.session_id = uuid4() - session.jwt = "test_jwt" - session.config = Mock() - session.config.endpoint = base_url - # Add session to registry so LogCapture can find it - add_session(session) - return session - @pytest.fixture def session_id(self): return str(uuid4()) @@ -35,10 +23,10 @@ def initial_handler_count(self): """Get initial number of handlers on the logger""" return len(logger.handlers) - def test_setup_telemetry_components(self, session_id, mock_req, mock_session): + def test_setup_telemetry_components(self, session_id, mock_req, agentops_session): """Test that telemetry setup creates and returns the expected components""" # Set up telemetry with real exporter - log_exporter = SessionLogExporter(session=mock_session) + log_exporter = SessionLogExporter(session=agentops_session) log_handler, log_processor = setup_session_telemetry(session_id, log_exporter) # Verify components are created with correct types @@ -52,10 +40,10 @@ def test_setup_telemetry_components(self, session_id, mock_req, mock_session): # Clean up cleanup_session_telemetry(log_handler, log_processor) - def test_handler_installation_and_cleanup(self, session_id, mock_req, mock_session, initial_handler_count): + def test_handler_installation_and_cleanup(self, session_id, mock_req, agentops_session, initial_handler_count): """Test that handler is properly installed and removed""" # Set up telemetry - log_exporter = SessionLogExporter(session=mock_session) + log_exporter = SessionLogExporter(session=agentops_session) log_handler, log_processor = 
setup_session_telemetry(session_id, log_exporter) logger.addHandler(log_handler) @@ -70,17 +58,17 @@ def test_handler_installation_and_cleanup(self, session_id, mock_req, mock_sessi assert len(logger.handlers) == initial_handler_count assert log_handler not in logger.handlers - def test_logging_with_telemetry(self, mock_req, mock_session): + def test_logging_with_telemetry(self, mock_req, agentops_session): """Test that logs are captured and exported""" # Create and start log capture - capture = LogCapture(session_id=mock_session.session_id) + capture = LogCapture(session_id=agentops_session.session_id) capture.start() try: - session_id = str(mock_session.session_id) + session_id = str(agentops_session.session_id) print(f"\nSession ID: {session_id}") - log_exporter = SessionLogExporter(session=mock_session) + log_exporter = SessionLogExporter(session=agentops_session) log_handler, log_processor = setup_session_telemetry(session_id, log_exporter) logger.addHandler(log_handler) @@ -96,8 +84,8 @@ def test_logging_with_telemetry(self, mock_req, mock_session): # Debug: Print all request URLs and mock setup print("\nMock setup:") - print(f"Base URL: {mock_session.config.endpoint}") - print(f"Expected endpoint: {mock_session.config.endpoint}/v3/logs/{session_id}") + print(f"Base URL: {agentops_session.config.endpoint}") + print(f"Expected endpoint: {agentops_session.config.endpoint}/v3/logs/{session_id}") print("\nRequest history:") for req in mock_req.request_history: print(f"Method: {req.method}, URL: {req.url}") @@ -113,10 +101,10 @@ def test_logging_with_telemetry(self, mock_req, mock_session): capture.stop() cleanup_session_telemetry(log_handler, log_processor) - def test_cleanup_prevents_further_logging(self, session_id, mock_req, mock_session): + def test_cleanup_prevents_further_logging(self, session_id, mock_req, agentops_session): """Test that cleanup prevents further log exports""" # Set up telemetry - log_exporter = SessionLogExporter(session=mock_session) + 
log_exporter = SessionLogExporter(session=agentops_session) log_handler, log_processor = setup_session_telemetry(session_id, log_exporter) logger.addHandler(log_handler) @@ -135,13 +123,13 @@ def test_cleanup_prevents_further_logging(self, session_id, mock_req, mock_sessi final_request_count = len([r for r in mock_req.request_history if r.url.endswith(f"/v3/logs/{session_id}")]) assert final_request_count == initial_request_count - def test_multiple_sessions_isolation(self, mock_req, mock_session): + def test_multiple_sessions_isolation(self, mock_req, agentops_session): """Test that multiple sessions maintain logging isolation""" # Set up two sessions session_id_1 = str(uuid4()) session_id_2 = str(uuid4()) - log_exporter = SessionLogExporter(session=mock_session) + log_exporter = SessionLogExporter(session=agentops_session) handler1, processor1 = setup_session_telemetry(session_id_1, log_exporter) handler2, processor2 = setup_session_telemetry(session_id_2, log_exporter) @@ -160,4 +148,4 @@ def test_multiple_sessions_isolation(self, mock_req, mock_session): assert handler2 in logger.handlers # Clean up the other session - cleanup_session_telemetry(handler2, processor2) \ No newline at end of file + cleanup_session_telemetry(handler2, processor2) From 0161ee5c012796b0583e8652940a7bd9aaebf1af Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 5 Feb 2025 22:55:15 +0200 Subject: [PATCH 21/31] session - log exporter: use safe_serialize instead of json.dumps on logs Signed-off-by: Teo --- agentops/session.py | 21 +++++---------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/agentops/session.py b/agentops/session.py index 1060c4c51..5fc1cb3f2 100644 --- a/agentops/session.py +++ b/agentops/session.py @@ -25,8 +25,8 @@ from .exceptions import ApiServerException from .helpers import filter_unjsonable, get_ISO_time, safe_serialize from .http_client import HttpClient, Response +from .instrumentation import cleanup_session_telemetry, setup_session_telemetry 
from .log_config import logger -from .instrumentation import setup_session_telemetry, cleanup_session_telemetry """ OTEL Guidelines: @@ -202,25 +202,17 @@ def __init__(self, session: Session): self._shutdown = False def export(self, batch: Sequence[LogRecord]) -> LogExportResult: - """ - Export the log records to the AgentOps backend. - """ + """Export the log records to the AgentOps backend.""" if self._shutdown: return LogExportResult.SUCCESS try: # Format logs for API - log_data = { - "logs": [record.body for record in batch], - "start_time": self.session.init_timestamp, - "end_time": self.session.end_timestamp, - "is_capturing": not self._shutdown, - } # Send logs to API res = HttpClient.put( f"{self.session.config.endpoint}/v3/logs/{self.session.session_id}", - json.dumps(log_data).encode("utf-8"), + safe_serialize(batch).encode("utf-8"), api_key=self.session.config.api_key, jwt=self.session.jwt, ) @@ -228,7 +220,7 @@ def export(self, batch: Sequence[LogRecord]) -> LogExportResult: return LogExportResult.SUCCESS if res.code == 200 else LogExportResult.FAILURE except Exception as e: - logger.error(f"Failed to export logs: {e}") + logger.exception("Failed to export logs", exc_info=e) return LogExportResult.FAILURE def force_flush(self, timeout_millis: Optional[int] = None) -> bool: @@ -334,10 +326,7 @@ def __init__( # Initialize logging components self._log_exporter = SessionLogExporter(session=self) - self._log_handler, self._log_processor = setup_session_telemetry( - str(session_id), - self._log_exporter - ) + self._log_handler, self._log_processor = setup_session_telemetry(str(session_id), self._log_exporter) logger.addHandler(self._log_handler) def set_video(self, video: str) -> None: From aacf0fbe1c6729172368b1ba13b7665d4f8f7f60 Mon Sep 17 00:00:00 2001 From: Teo Date: Thu, 6 Feb 2025 00:12:30 +0200 Subject: [PATCH 22/31] ruff Signed-off-by: Teo --- agentops/session.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git 
a/agentops/session.py b/agentops/session.py index 5fc1cb3f2..8bec4891c 100644 --- a/agentops/session.py +++ b/agentops/session.py @@ -25,8 +25,8 @@ from .exceptions import ApiServerException from .helpers import filter_unjsonable, get_ISO_time, safe_serialize from .http_client import HttpClient, Response -from .instrumentation import cleanup_session_telemetry, setup_session_telemetry from .log_config import logger +from .instrumentation import setup_session_telemetry, cleanup_session_telemetry """ OTEL Guidelines: @@ -326,7 +326,10 @@ def __init__( # Initialize logging components self._log_exporter = SessionLogExporter(session=self) - self._log_handler, self._log_processor = setup_session_telemetry(str(session_id), self._log_exporter) + self._log_handler, self._log_processor = setup_session_telemetry( + str(session_id), + self._log_exporter + ) logger.addHandler(self._log_handler) def set_video(self, video: str) -> None: From 65a9b92c3ae575cc6f0be770f356de3998c21d48 Mon Sep 17 00:00:00 2001 From: Teo Date: Thu, 6 Feb 2025 01:57:26 +0200 Subject: [PATCH 23/31] fix: SessionLogExporter - appropriate typing of LogData and serialization Signed-off-by: Teo --- agentops/session.py | 50 ++++++++++++++++++++++++++------------------- 1 file changed, 29 insertions(+), 21 deletions(-) diff --git a/agentops/session.py b/agentops/session.py index 8bec4891c..11260606b 100644 --- a/agentops/session.py +++ b/agentops/session.py @@ -8,12 +8,13 @@ from datetime import datetime, timezone from decimal import ROUND_HALF_UP, Decimal from enum import Enum -from typing import Any, Dict, List, Optional, Sequence, Union, cast +from typing import Any, Dict, Generator, List, Optional, Sequence, Union, cast from uuid import UUID, uuid4 from opentelemetry import trace from opentelemetry.context import attach, detach, set_value from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler, LogRecord +from opentelemetry.sdk._logs._internal import LogData from opentelemetry.sdk._logs.export 
import BatchLogRecordProcessor, LogExporter, LogExportResult from opentelemetry.sdk.resources import SERVICE_NAME, Resource from opentelemetry.sdk.trace import ReadableSpan, TracerProvider @@ -25,8 +26,8 @@ from .exceptions import ApiServerException from .helpers import filter_unjsonable, get_ISO_time, safe_serialize from .http_client import HttpClient, Response +from .instrumentation import cleanup_session_telemetry, setup_session_telemetry from .log_config import logger -from .instrumentation import setup_session_telemetry, cleanup_session_telemetry """ OTEL Guidelines: @@ -201,27 +202,37 @@ def __init__(self, session: Session): self.session = session self._shutdown = False - def export(self, batch: Sequence[LogRecord]) -> LogExportResult: + def export(self, batch: Sequence[LogData]) -> LogExportResult: """Export the log records to the AgentOps backend.""" if self._shutdown: return LogExportResult.SUCCESS - try: - # Format logs for API - - # Send logs to API - res = HttpClient.put( - f"{self.session.config.endpoint}/v3/logs/{self.session.session_id}", - safe_serialize(batch).encode("utf-8"), - api_key=self.session.config.api_key, - jwt=self.session.jwt, - ) + # try: + if not batch: + return LogExportResult.SUCCESS - return LogExportResult.SUCCESS if res.code == 200 else LogExportResult.FAILURE + def __serialize(_entry: Union[LogRecord, LogData]) -> Dict[str, Any]: + # Why double encoding? 
[This is a quick workaround] + # Turns out safe_serialize() is not yet good enough to handle a variety of objects + # For instance: 'attributes': '<>' + if isinstance(_entry, LogRecord): + return json.loads(_entry.to_json()) + elif isinstance(_entry, LogData): + return json.loads(_entry.log_record.to_json()) + + # Send logs to API as a single JSON array + res = HttpClient.put( + f"{self.session.config.endpoint}/v3/logs/{self.session.session_id}", + (json.dumps([__serialize(it) for it in batch])).encode("utf-8"), + api_key=self.session.config.api_key, + jwt=self.session.jwt, + ) - except Exception as e: - logger.exception("Failed to export logs", exc_info=e) - return LogExportResult.FAILURE + return LogExportResult.SUCCESS if res.code == 200 else LogExportResult.FAILURE + + # except Exception as e: + # logger.exception("Failed to export logs", exc_info=e) + # return LogExportResult.FAILURE def force_flush(self, timeout_millis: Optional[int] = None) -> bool: """ @@ -326,10 +337,7 @@ def __init__( # Initialize logging components self._log_exporter = SessionLogExporter(session=self) - self._log_handler, self._log_processor = setup_session_telemetry( - str(session_id), - self._log_exporter - ) + self._log_handler, self._log_processor = setup_session_telemetry(str(session_id), self._log_exporter) logger.addHandler(self._log_handler) def set_video(self, video: str) -> None: From 4f28141473063e51414cad74a6ca9bdd92c9532e Mon Sep 17 00:00:00 2001 From: Teo Date: Thu, 6 Feb 2025 01:57:49 +0200 Subject: [PATCH 24/31] fix(test_decorators): use real session uuid rather than literal "test_sesssion" Signed-off-by: Teo --- tests/unit/test_decorators.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_decorators.py b/tests/unit/test_decorators.py index e9f7942d7..29618643f 100644 --- a/tests/unit/test_decorators.py +++ b/tests/unit/test_decorators.py @@ -1,3 +1,4 @@ +import uuid import pytest from collections import namedtuple from typing import 
Tuple @@ -96,7 +97,7 @@ def test_json_serialization(self): This demonstrates @teocns's point that JSON serialization works fine with tuples, as they are naturally converted to lists during JSON serialization.""" config = Client()._config - session = Session(session_id="test_session", config=config) + session = Session(session_id=uuid.uuid4(), config=config) # Test with regular tuple direct_tuple = (1, "test") From e1ba7550c2093d9d9f963a19045aeb0c8d4c2e0d Mon Sep 17 00:00:00 2001 From: Teo Date: Thu, 6 Feb 2025 02:23:29 +0200 Subject: [PATCH 25/31] feat(helpers.safe_serialize): + Enum serialization Signed-off-by: Teo --- agentops/helpers.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/agentops/helpers.py b/agentops/helpers.py index ca0c4f0e3..c2e48c87a 100644 --- a/agentops/helpers.py +++ b/agentops/helpers.py @@ -1,3 +1,4 @@ +from enum import Enum import inspect import json from datetime import datetime, timezone @@ -67,6 +68,10 @@ def default(o): try: if isinstance(o, UUID): return str(o) + # Handle Enum types + elif isinstance(o, Enum): + return o.value + # Handle objects with attributes property that's dict-like elif hasattr(o, "model_dump_json"): return str(o.model_dump_json()) elif hasattr(o, "to_json"): From 0fc50b72794de0708b08cebb8f704015ac329439 Mon Sep 17 00:00:00 2001 From: Teo Date: Thu, 6 Feb 2025 03:46:19 +0200 Subject: [PATCH 26/31] fix: test_session x session log reqs Signed-off-by: Teo --- tests/unit/test_session.py | 60 +++++++++++++++++++++----------------- 1 file changed, 34 insertions(+), 26 deletions(-) diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index c4e590427..85de4d69a 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -8,6 +8,7 @@ import pytest import requests_mock from opentelemetry import trace +from opentelemetry._logs import SeverityNumber from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler, LogRecord from opentelemetry.sdk._logs.export import 
BatchLogRecordProcessor, LogExporter, LogExportResult from opentelemetry.sdk.trace import ReadableSpan @@ -655,7 +656,7 @@ def test_log_export_basic(self, mock_req): span_id=0x00000000DEADBEF0, trace_flags=0x01, severity_text="INFO", - severity_number=9, + severity_number=SeverityNumber.INFO, body="Test log message", resource=self.log_handler._logger_provider.resource, attributes={}, @@ -669,10 +670,11 @@ def test_log_export_basic(self, mock_req): # Verify the request assert len(mock_req.request_history) > 0 - last_request = mock_req.last_request.json() - assert "logs" in last_request - assert len(last_request["logs"]) == 1 - assert last_request["logs"][0] == "Test log message" + last_request = mock_req.request_history[-1] + assert last_request.path.startswith("/v3/logs") + lr_data = last_request.json() + assert len(lr_data) == 1 + assert lr_data[0]["body"] == "Test log message" def test_log_export_multiple_records(self, mock_req): """Test exporting multiple log records at once""" @@ -684,7 +686,7 @@ def test_log_export_multiple_records(self, mock_req): span_id=0x00000000DEADBEF0, trace_flags=0x01, severity_text="INFO", - severity_number=9, + severity_number=SeverityNumber.INFO, body=f"Test message {i}", resource=self.log_handler._logger_provider.resource, attributes={}, @@ -700,10 +702,13 @@ def test_log_export_multiple_records(self, mock_req): # Verify the request assert len(mock_req.request_history) > 0 - last_request = mock_req.last_request.json() - assert "logs" in last_request - assert len(last_request["logs"]) == 3 - assert last_request["logs"] == ["Test message 0", "Test message 1", "Test message 2"] + last_request = mock_req.request_history[-1] + assert last_request.path.startswith("/v3/logs") + lr_data = last_request.json() + assert len(lr_data) == 3 + assert lr_data[0]["body"] == "Test message 0" + assert lr_data[1]["body"] == "Test message 1" + assert lr_data[2]["body"] == "Test message 2" def test_log_export_after_shutdown(self, mock_req): """Test that 
export after shutdown returns success without sending request""" @@ -717,7 +722,7 @@ def test_log_export_after_shutdown(self, mock_req): span_id=0x00000000DEADBEF0, trace_flags=0x01, severity_text="INFO", - severity_number=9, + severity_number=SeverityNumber.INFO, body="Test log message", resource=self.log_handler._logger_provider.resource, attributes={}, @@ -739,7 +744,7 @@ def test_log_export_with_session_metadata(self, mock_req): span_id=0x00000000DEADBEF0, trace_flags=0x01, severity_text="INFO", - severity_number=9, + severity_number=SeverityNumber.INFO, body="Test log message", resource=self.log_handler._logger_provider.resource, attributes={}, @@ -750,11 +755,8 @@ def test_log_export_with_session_metadata(self, mock_req): assert result == LogExportResult.SUCCESS # Verify the request includes session metadata - last_request = mock_req.last_request.json() - assert "start_time" in last_request - assert "end_time" in last_request - assert "is_capturing" in last_request - assert last_request["is_capturing"] == True + last_request = mock_req.last_request.json()[0] + last_request['body'] == 'Test log message' class TestSessionLogging: @@ -782,13 +784,15 @@ def test_log_handler_installation(self, agentops_logger): """Test that the session's specific log handler is correctly installed""" # Get the handler that was created for this session session_handler = self.session._log_handler - + # Verify the handler exists and is a LoggingHandler assert isinstance(session_handler, LoggingHandler), "Session should have a LoggingHandler instance" - + # Verify this specific handler is in the logger's handlers - assert session_handler in agentops_logger.handlers, "Session's specific LoggingHandler should be in logger's handlers" - + assert ( + session_handler in agentops_logger.handlers + ), "Session's specific LoggingHandler should be in logger's handlers" + # Count how many times this specific handler appears handler_count = sum(1 for h in agentops_logger.handlers if h is 
session_handler) assert handler_count == 1, "Session's LoggingHandler should appear exactly once in logger's handlers" @@ -797,15 +801,19 @@ def test_log_handler_removal_on_session_end(self, agentops_logger): """Test that the session's specific log handler is removed when session ends""" # Get the handler that was created for this session this_session_logging_handler = self.session._log_handler - + # Verify handler exists before ending session - assert this_session_logging_handler in agentops_logger.handlers, "Session handler should be present before ending session" - + assert ( + this_session_logging_handler in agentops_logger.handlers + ), "Session handler should be present before ending session" + # End the session self.session.end_session("Success") - + # Verify the specific handler was removed - assert this_session_logging_handler not in agentops_logger.handlers, "Session handler should be removed after ending session" + assert ( + this_session_logging_handler not in agentops_logger.handlers + ), "Session handler should be removed after ending session" # def test_logging_with_session(self, mock_req): # """Test that logging works with an active session""" From 44260bfd93f34829b73859c8a6abd6f024d6a655 Mon Sep 17 00:00:00 2001 From: Teo Date: Thu, 6 Feb 2025 03:56:28 +0200 Subject: [PATCH 27/31] fix(test_sessions::test_add_tags): TypeError: list indices must be integers or slices, not str Signed-off-by: Teo --- tests/unit/test_session.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index 85de4d69a..eec401c84 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -315,6 +315,7 @@ def test_two_sessions(self, mock_req): assert len(request_json["session"]["tags"]) == 0 def test_add_tags(self, mock_req): + """Test adding tags to multiple sessions""" # Arrange session_1_tags = ["session-1"] session_2_tags = ["session-2"] @@ -333,13 +334,19 @@ def 
test_add_tags(self, mock_req): session_2.end_session(end_state) time.sleep(0.15) - # Assert 3 requests, 1 for session init, 1 for event, 1 for end session - req1 = mock_req.request_history[-1].json() - req2 = mock_req.request_history[-2].json() + # Find update session requests + update_requests = [r for r in mock_req.request_history if "/v2/update_session" in r.url] + assert len(update_requests) >= 2 - session_1_req = req1 if req1["session"]["session_id"] == session_1.session_id else req2 - session_2_req = req2 if req2["session"]["session_id"] == session_2.session_id else req1 + # Get the last two update requests + req1 = update_requests[-1].json() + req2 = update_requests[-2].json() + # Match requests to sessions + session_1_req = req1 if req1["session"]["session_id"] == str(session_1.session_id) else req2 + session_2_req = req2 if req2["session"]["session_id"] == str(session_2.session_id) else req1 + + # Assert assert session_1_req["session"]["end_state"] == end_state assert session_2_req["session"]["end_state"] == end_state From 84f24f8f52e2f1034c3edaf7579f2286b5183d33 Mon Sep 17 00:00:00 2001 From: Teo Date: Thu, 6 Feb 2025 04:12:39 +0200 Subject: [PATCH 28/31] ruff Signed-off-by: Teo --- agentops/http_client.py | 11 ++++---- agentops/instrumentation.py | 36 +++++++++++++------------ agentops/log_capture.py | 42 ++++++++++++------------------ tests/unit/conftest.py | 2 +- tests/unit/test_instrumentation.py | 15 ++++++----- tests/unit/test_log_capture.py | 1 + tests/unit/test_session.py | 4 +-- 7 files changed, 54 insertions(+), 57 deletions(-) diff --git a/agentops/http_client.py b/agentops/http_client.py index 1f0fed5da..2264af1b5 100644 --- a/agentops/http_client.py +++ b/agentops/http_client.py @@ -127,11 +127,8 @@ def _make_request( try: headers = cls._prepare_headers(api_key, parent_key, jwt, header) session = cls.get_session() - - kwargs = { - "headers": headers, - "timeout": 20 - } + + kwargs = {"headers": headers, "timeout": 20} if payload is not None: 
kwargs["data"] = payload @@ -191,7 +188,9 @@ def post( header: Optional[Dict[str, str]] = None, ) -> Response: """Make HTTP POST request""" - return cls._make_request("POST", url, api_key=api_key, parent_key=parent_key, jwt=jwt, header=header, payload=payload) + return cls._make_request( + "POST", url, api_key=api_key, parent_key=parent_key, jwt=jwt, header=header, payload=payload + ) @classmethod def put( diff --git a/agentops/instrumentation.py b/agentops/instrumentation.py index a22e2a8e3..ce6e264c4 100644 --- a/agentops/instrumentation.py +++ b/agentops/instrumentation.py @@ -32,20 +32,22 @@ # Map of session_id to LoggingHandler _session_handlers: Dict[UUID, LoggingHandler] = {} + def get_session_handler(session_id: UUID) -> Optional[LoggingHandler]: """Get the logging handler for a specific session. - + Args: session_id: The UUID of the session - + Returns: The session's LoggingHandler if it exists, None otherwise """ return _session_handlers.get(session_id) + def set_session_handler(session_id: UUID, handler: Optional[LoggingHandler]) -> None: """Set or remove the logging handler for a session. - + Args: session_id: The UUID of the session handler: The handler to set, or None to remove @@ -55,18 +57,19 @@ def set_session_handler(session_id: UUID, handler: Optional[LoggingHandler]) -> else: _session_handlers[session_id] = handler + def setup_session_telemetry(session_id: str, log_exporter) -> tuple[LoggingHandler, BatchLogRecordProcessor]: """Set up OpenTelemetry logging components for a new session. 
- + This function creates the necessary components to capture and export logs for a specific session: - A LoggerProvider with session-specific resource attributes - A BatchLogRecordProcessor to batch and export logs - A LoggingHandler to capture logs and forward them to the processor - + Args: session_id: Unique identifier for the session, used to tag telemetry data log_exporter: SessionLogExporter instance that handles sending logs to AgentOps backend - + Returns: Tuple containing: - LoggingHandler: Handler that should be added to the logger @@ -75,50 +78,51 @@ def setup_session_telemetry(session_id: str, log_exporter) -> tuple[LoggingHandl # Create logging components resource = Resource.create({SERVICE_NAME: f"agentops.session.{session_id}"}) logger_provider = LoggerProvider(resource=resource) - + # Create processor and handler log_processor = BatchLogRecordProcessor(log_exporter) logger_provider.add_log_record_processor(log_processor) # Add processor to provider - + log_handler = LoggingHandler( level=logging.INFO, logger_provider=logger_provider, ) - + # Register handler with session set_session_handler(session_id, log_handler) - + return log_handler, log_processor + def cleanup_session_telemetry(log_handler: LoggingHandler, log_processor: BatchLogRecordProcessor) -> None: """Clean up OpenTelemetry logging components when a session ends. - + This function ensures proper cleanup by: 1. Removing the handler from the logger 2. Closing the handler to free resources 3. Flushing any pending logs in the processor 4. 
Shutting down the processor - + Args: log_handler: The session's LoggingHandler to be removed and closed log_processor: The session's BatchLogRecordProcessor to be flushed and shutdown - + Used by: Session.end_session() to clean up logging components when the session ends """ from agentops.log_config import logger - + try: # Remove and close handler logger.removeHandler(log_handler) log_handler.close() - + # Remove from session handlers for session_id, handler in list(_session_handlers.items()): if handler is log_handler: set_session_handler(session_id, None) break - + # Shutdown processor log_processor.force_flush(timeout_millis=5000) log_processor.shutdown() diff --git a/agentops/log_capture.py b/agentops/log_capture.py index be885734c..567c498bd 100644 --- a/agentops/log_capture.py +++ b/agentops/log_capture.py @@ -259,59 +259,50 @@ def flush(self): class SessionLogHandler(LoggingHandler): """A logging handler that captures logs for a specific session without altering output. - + This handler captures logs and associates them with a specific session, while allowing normal logging behavior to continue unaffected. 
""" - + def __init__(self, session_id: UUID, logger_provider=None): super().__init__(level=logging.INFO, logger_provider=logger_provider) self.session_id = session_id - self.log_counts: Dict[str, int] = { - "INFO": 0, - "WARNING": 0, - "ERROR": 0, - "DEBUG": 0, - "CRITICAL": 0 - } - + self.log_counts: Dict[str, int] = {"INFO": 0, "WARNING": 0, "ERROR": 0, "DEBUG": 0, "CRITICAL": 0} + def emit(self, record: logging.LogRecord) -> None: """Emit a log record, capturing it for the session without altering normal output.""" try: # Count the log by level self.log_counts[record.levelname] += 1 - + # Create the log event with ANSI codes preserved msg = self.format(record) - + # Let the parent class handle sending to OTEL super().emit(record) - + except Exception: self.handleError(record) def install_session_handler(session: "Session") -> Optional[SessionLogHandler]: """Install a logging handler for a specific session. - + Args: session: The session to install the handler for - + Returns: The installed handler, or None if installation failed """ try: # Create handler with session's logger provider - handler = SessionLogHandler( - session_id=session.session_id, - logger_provider=session._logger_provider - ) - + handler = SessionLogHandler(session_id=session.session_id, logger_provider=session._logger_provider) + # Add handler to root logger to capture all logs logging.getLogger().addHandler(handler) - + return handler - + except Exception as e: logging.error(f"Failed to install session log handler: {e}") return None @@ -319,7 +310,7 @@ def install_session_handler(session: "Session") -> Optional[SessionLogHandler]: def remove_session_handler(handler: SessionLogHandler) -> None: """Remove a session's logging handler. 
- + Args: handler: The handler to remove """ @@ -358,17 +349,18 @@ class MockSession: try: # Test Rich formatting from rich.console import Console + console = Console(force_terminal=True) rprint = console.print rprint("[red]This is red text[/red]") rprint("[blue]Blue[/blue] and [green]green[/green] mixed") rprint("[bold red]Bold red[/bold red] and [italic blue]italic blue[/italic blue]") - + # Test raw ANSI codes print("\033[31mDirect red ANSI\033[0m\n") print("\033[34mBlue\033[0m and \033[32mgreen\033[0m mixed ANSI\n") print("\033[1;31mBold red ANSI\033[0m\n") - + # Test stderr with colors sys.stderr.write("\033[35mMagenta error\033[0m\n") sys.stderr.write("\033[33mYellow warning\033[0m\n") diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index bfeb796c1..2b6b17be1 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -82,7 +82,7 @@ def reauthorize_jwt_response(request, context): m.post(base_url + "/v2/reauthorize_jwt", json=reauthorize_jwt_response) m.post(base_url + "/v2/create_agent", json={"status": "success"}) # Use explicit regex pattern for logs endpoint to match any URL and session ID - logs_pattern = re.compile(r'.*/v3/logs/[0-9a-f-]{8}-[0-9a-f-]{4}-[0-9a-f-]{4}-[0-9a-f-]{4}-[0-9a-f-]{12}') + logs_pattern = re.compile(r".*/v3/logs/[0-9a-f-]{8}-[0-9a-f-]{4}-[0-9a-f-]{4}-[0-9a-f-]{4}-[0-9a-f-]{12}") m.put(logs_pattern, json={"status": "success"}) yield m diff --git a/tests/unit/test_instrumentation.py b/tests/unit/test_instrumentation.py index e80ba1855..5a91c6ac1 100644 --- a/tests/unit/test_instrumentation.py +++ b/tests/unit/test_instrumentation.py @@ -67,7 +67,7 @@ def test_logging_with_telemetry(self, mock_req, agentops_session): try: session_id = str(agentops_session.session_id) print(f"\nSession ID: {session_id}") - + log_exporter = SessionLogExporter(session=agentops_session) log_handler, log_processor = setup_session_telemetry(session_id, log_exporter) logger.addHandler(log_handler) @@ -89,12 +89,13 @@ def 
test_logging_with_telemetry(self, mock_req, agentops_session): print("\nRequest history:") for req in mock_req.request_history: print(f"Method: {req.method}, URL: {req.url}") - if hasattr(req, 'text'): + if hasattr(req, "text"): print(f"Body: {req.text}") # Verify the request was made to the logs endpoint - assert any(req.url.endswith(f"/v3/logs/{session_id}") for req in mock_req.request_history), \ - f"No request found for /v3/logs/{session_id} in {[req.url for req in mock_req.request_history]}" + assert any( + req.url.endswith(f"/v3/logs/{session_id}") for req in mock_req.request_history + ), f"No request found for /v3/logs/{session_id} in {[req.url for req in mock_req.request_history]}" finally: # Clean up @@ -128,11 +129,11 @@ def test_multiple_sessions_isolation(self, mock_req, agentops_session): # Set up two sessions session_id_1 = str(uuid4()) session_id_2 = str(uuid4()) - + log_exporter = SessionLogExporter(session=agentops_session) handler1, processor1 = setup_session_telemetry(session_id_1, log_exporter) handler2, processor2 = setup_session_telemetry(session_id_2, log_exporter) - + logger.addHandler(handler1) logger.addHandler(handler2) @@ -148,4 +149,4 @@ def test_multiple_sessions_isolation(self, mock_req, agentops_session): assert handler2 in logger.handlers # Clean up the other session - cleanup_session_telemetry(handler2, processor2) + cleanup_session_telemetry(handler2, processor2) diff --git a/tests/unit/test_log_capture.py b/tests/unit/test_log_capture.py index dcbd0f62b..cb8cf1e11 100644 --- a/tests/unit/test_log_capture.py +++ b/tests/unit/test_log_capture.py @@ -22,6 +22,7 @@ def session_id(): @pytest.fixture def mock_session(session_id, logger_provider): """Create a mock session with logging components""" + class MockSession: def __init__(self, session_id, logger_provider): self.session_id = session_id diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index eec401c84..28657e414 100644 --- a/tests/unit/test_session.py +++ 
b/tests/unit/test_session.py @@ -714,7 +714,7 @@ def test_log_export_multiple_records(self, mock_req): lr_data = last_request.json() assert len(lr_data) == 3 assert lr_data[0]["body"] == "Test message 0" - assert lr_data[1]["body"] == "Test message 1" + assert lr_data[1]["body"] == "Test message 1" assert lr_data[2]["body"] == "Test message 2" def test_log_export_after_shutdown(self, mock_req): @@ -763,7 +763,7 @@ def test_log_export_with_session_metadata(self, mock_req): # Verify the request includes session metadata last_request = mock_req.last_request.json()[0] - last_request['body'] == 'Test log message' + assert last_request["body"] == "Test log message" class TestSessionLogging: From 8dab0a27787aede3ead07c37f0ceae5dd6800758 Mon Sep 17 00:00:00 2001 From: Teo Date: Thu, 6 Feb 2025 04:19:49 +0200 Subject: [PATCH 29/31] oh this should've gone in #670 (test_record_action, assert on len requests) Signed-off-by: Teo --- tests/unit/test_record_action.py | 38 +++++++++++++++----------------- 1 file changed, 18 insertions(+), 20 deletions(-) diff --git a/tests/unit/test_record_action.py b/tests/unit/test_record_action.py index 0e781a578..062552685 100644 --- a/tests/unit/test_record_action.py +++ b/tests/unit/test_record_action.py @@ -136,7 +136,7 @@ def test_multiple_sessions_sync(self, mock_req): # Arrange @record_action(event_name=self.event_type) - def add_three(x, y, z=3): + def add_three(x, y, z=3, *, session=None): return x + y + z # Act @@ -181,7 +181,7 @@ async def test_multiple_sessions_async(self, mock_req): # Arrange @record_action(self.event_type) - async def async_add(x, y): + async def async_add(x, y, *, session=None): time.sleep(0.1) return x + y @@ -191,31 +191,29 @@ async def async_add(x, y): await async_add(1, 2, session=session_2) time.sleep(0.1) - # Assert - assert len(mock_req.request_history) == 5 + # Find action requests + action_requests = [r for r in mock_req.request_history if "/v2/create_events" in r.url] + assert len(action_requests) >= 2 # 
Should have at least 2 action requests - request_json = mock_req.last_request.json() - assert mock_req.last_request.headers["X-Agentops-Api-Key"] == self.api_key - assert ( - mock_req.last_request.headers["Authorization"] - == f"Bearer {mock_req.session_jwts[str(session_2.session_id)]}" - ) + # Verify session_2's request (last request) + last_request = action_requests[-1] + assert last_request.headers["X-Agentops-Api-Key"] == self.api_key + assert last_request.headers["Authorization"] == f"Bearer {mock_req.session_jwts[str(session_2.session_id)]}" + request_json = last_request.json() assert request_json["events"][0]["action_type"] == self.event_type assert request_json["events"][0]["params"] == {"x": 1, "y": 2} assert request_json["events"][0]["returns"] == 3 - second_last_request_json = mock_req.request_history[-2].json() - assert mock_req.request_history[-2].headers["X-Agentops-Api-Key"] == self.api_key + # Verify session_1's request (second to last request) + second_last_request = action_requests[-2] + assert second_last_request.headers["X-Agentops-Api-Key"] == self.api_key assert ( - mock_req.request_history[-2].headers["Authorization"] - == f"Bearer {mock_req.session_jwts[str(session_1.session_id)]}" + second_last_request.headers["Authorization"] == f"Bearer {mock_req.session_jwts[str(session_1.session_id)]}" ) - assert second_last_request_json["events"][0]["action_type"] == self.event_type - assert second_last_request_json["events"][0]["params"] == { - "x": 1, - "y": 2, - } - assert second_last_request_json["events"][0]["returns"] == 3 + request_json = second_last_request.json() + assert request_json["events"][0]["action_type"] == self.event_type + assert request_json["events"][0]["params"] == {"x": 1, "y": 2} + assert request_json["events"][0]["returns"] == 3 session_1.end_session(end_state="Success") session_2.end_session(end_state="Success") From d73da32e08d6dd31c52932d050bb7f99b3b193e9 Mon Sep 17 00:00:00 2001 From: Teo Date: Thu, 6 Feb 2025 18:16:53 
+0200 Subject: [PATCH 30/31] tests: fix correct usage of mock url (base_url) rather than hardcoded url Signed-off-by: Teo --- tests/unit/test_canary.py | 4 ++-- tests/unit/test_pre_init.py | 4 ++-- tests/unit/test_record_action.py | 4 ++-- tests/unit/test_record_tool.py | 4 ++-- tests/unit/test_teardown.py | 4 ++-- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/unit/test_canary.py b/tests/unit/test_canary.py index 90fcc65de..c55c70ab9 100644 --- a/tests/unit/test_canary.py +++ b/tests/unit/test_canary.py @@ -8,8 +8,8 @@ class TestCanary: - def setup_method(self): - self.url = "https://api.agentops.ai" + def setup_method(self, base_url): + self.url = base_url self.api_key = "11111111-1111-4111-8111-111111111111" agentops.init(api_key=self.api_key, max_wait_time=500, auto_start_session=False) diff --git a/tests/unit/test_pre_init.py b/tests/unit/test_pre_init.py index 5e8ce0684..a4f8ce8f6 100644 --- a/tests/unit/test_pre_init.py +++ b/tests/unit/test_pre_init.py @@ -17,8 +17,8 @@ def __init__(self): class TestPreInit: - def setup_method(self): - self.url = "https://api.agentops.ai" + def setup_method(self, base_url): + self.url = base_url self.api_key = "11111111-1111-4111-8111-111111111111" def test_track_agent(self, mock_req): diff --git a/tests/unit/test_record_action.py b/tests/unit/test_record_action.py index 062552685..6bcb46ccd 100644 --- a/tests/unit/test_record_action.py +++ b/tests/unit/test_record_action.py @@ -8,8 +8,8 @@ class TestRecordAction: - def setup_method(self): - self.url = "https://api.agentops.ai" + def setup_method(self, base_url): + self.url = base_url self.api_key = "11111111-1111-4111-8111-111111111111" self.event_type = "test_event_type" agentops.init(self.api_key, max_wait_time=50, auto_start_session=False) diff --git a/tests/unit/test_record_tool.py b/tests/unit/test_record_tool.py index f2fca9023..b7979f82b 100644 --- a/tests/unit/test_record_tool.py +++ b/tests/unit/test_record_tool.py @@ -12,8 +12,8 @@ class 
TestRecordTool: - def setup_method(self): - self.url = "https://api.agentops.ai" + def setup_method(self, base_url): + self.url = base_url self.api_key = "11111111-1111-4111-8111-111111111111" self.tool_name = "test_tool_name" agentops.init(self.api_key, max_wait_time=5, auto_start_session=False) diff --git a/tests/unit/test_teardown.py b/tests/unit/test_teardown.py index eadb5b549..b646f62bf 100644 --- a/tests/unit/test_teardown.py +++ b/tests/unit/test_teardown.py @@ -5,8 +5,8 @@ class TestSessions: - def test_exit(self, mock_req): - url = "https://api.agentops.ai" + def test_exit(self, mock_req, base_url): + url = base_url api_key = "11111111-1111-4111-8111-111111111111" tool_name = "test_tool_name" agentops.init(api_key, max_wait_time=5, auto_start_session=False) From d529660de5386a3b2c63466399a075bfdd07ee21 Mon Sep 17 00:00:00 2001 From: Teo Date: Wed, 12 Feb 2025 22:22:45 +0200 Subject: [PATCH 31/31] save Signed-off-by: Teo --- agentops/instrumentation.py | 28 +++++++++++++++--------- agentops/log_capture.py | 27 +++++++---------------- agentops/session.py | 7 +++--- pyproject.toml | 2 ++ tests/integration/conftest.py | 14 +++++++++--- tests/integration/test_logcap.py | 37 ++++++++++++++++++++++++++++++++ tests/unit/conftest.py | 6 +++--- 7 files changed, 83 insertions(+), 38 deletions(-) create mode 100644 tests/integration/test_logcap.py diff --git a/agentops/instrumentation.py b/agentops/instrumentation.py index ce6e264c4..16f7029b8 100644 --- a/agentops/instrumentation.py +++ b/agentops/instrumentation.py @@ -8,7 +8,7 @@ from opentelemetry import trace from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler from opentelemetry.sdk._logs.export import BatchLogRecordProcessor -from opentelemetry.sdk.resources import Resource, SERVICE_NAME +from opentelemetry.sdk.resources import SERVICE_NAME, Resource from opentelemetry.sdk.trace import SpanProcessor, TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter from 
opentelemetry.sdk.trace.sampling import ParentBased, Sampler, TraceIdRatioBased @@ -58,7 +58,7 @@ def set_session_handler(session_id: UUID, handler: Optional[LoggingHandler]) -> _session_handlers[session_id] = handler -def setup_session_telemetry(session_id: str, log_exporter) -> tuple[LoggingHandler, BatchLogRecordProcessor]: +def setup_session_telemetry(session, log_exporter) -> tuple[LoggingHandler, BatchLogRecordProcessor]: """Set up OpenTelemetry logging components for a new session. This function creates the necessary components to capture and export logs for a specific session: @@ -76,22 +76,30 @@ def setup_session_telemetry(session_id: str, log_exporter) -> tuple[LoggingHandl - BatchLogRecordProcessor: Processor that batches and exports logs """ # Create logging components - resource = Resource.create({SERVICE_NAME: f"agentops.session.{session_id}"}) + resource = Resource.create({SERVICE_NAME: f"agentops.session.{session.session_id}"}) logger_provider = LoggerProvider(resource=resource) # Create processor and handler log_processor = BatchLogRecordProcessor(log_exporter) logger_provider.add_log_record_processor(log_processor) # Add processor to provider - log_handler = LoggingHandler( - level=logging.INFO, - logger_provider=logger_provider, - ) + from agentops.log_capture import LogCapture - # Register handler with session - set_session_handler(session_id, log_handler) + logcap = LogCapture( + session, + ) - return log_handler, log_processor + logcap.start() + + # log_handler = LoggingHandler( + # level=logging.INFO, + # logger_provider=logger_provider, + # ) + # + # # Register handler with session + # set_session_handler(session_id, log_handler) + # + # return log_handler, log_processor def cleanup_session_telemetry(log_handler: LoggingHandler, log_processor: BatchLogRecordProcessor) -> None: diff --git a/agentops/log_capture.py b/agentops/log_capture.py index 567c498bd..cc21596b4 100644 --- a/agentops/log_capture.py +++ b/agentops/log_capture.py @@ -23,7 
+23,7 @@ class LogCapture: If no telemetry manager is available, creates a standalone logging setup. Attributes: - session_id + session: The session object stdout_line_count: Number of lines written to stdout stderr_line_count: Number of lines written to stderr log_level_counts: Count of log messages by level @@ -32,7 +32,7 @@ class LogCapture: is_capturing: Whether capture is currently active """ - session_id: UUID + session: "Session" stdout_line_count: int = field(default=0) stderr_line_count: int = field(default=0) log_level_counts: Dict[str, int] = field( @@ -50,7 +50,6 @@ class LogCapture: _handler: Optional[LoggingHandler] = field(default=None, init=False, repr=False) _logger_provider: Optional[LoggerProvider] = field(default=None, init=False, repr=False) _owns_handler: bool = field(default=False, init=False, repr=False) - _session: Optional["Session"] = field(default=None, init=False, repr=False) def __post_init__(self): """Initialize loggers after dataclass initialization""" @@ -65,16 +64,9 @@ def __post_init__(self): logger.handlers.clear() @property - def session(self) -> Optional["Session"]: - """Get the associated session instance""" - if self._session is None: - from agentops.session import get_active_sessions - - for session in get_active_sessions(): - if session.session_id == self.session_id: - self._session = session - break - return self._session + def session_id(self) -> UUID: + """Get the session ID from the session object""" + return self.session.session_id def start(self): """Start capturing output using OTEL logging handler""" @@ -82,7 +74,7 @@ def start(self): return if not self.session: - raise ValueError(f"No active session found with ID {self.session_id}") + raise ValueError("No session provided") from agentops.helpers import get_ISO_time @@ -99,13 +91,10 @@ def start(self): # Use session's resource attributes if available resource_attrs = {"service.name": "agentops", "session.id": str(self.session_id)} - # 
resource_attrs.update(config.resource_attributes) - # Setup logger provider with console exporter resource = Resource.create(resource_attrs) self._logger_provider = LoggerProvider(resource=resource) - exporter = ConsoleLogExporter() - self._logger_provider.add_log_record_processor(BatchLogRecordProcessor(exporter)) + self._logger_provider.add_log_record_processor(BatchLogRecordProcessor(self.session._log_exporter)) self._handler = LoggingHandler( level=logging.INFO, @@ -344,7 +333,7 @@ class MockSession: add_session(session) # Add session to registry so it can be found # Create and start capture - capture = LogCapture(session_id=session.session_id) + capture = LogCapture(session=session) capture.start() try: # Test Rich formatting diff --git a/agentops/session.py b/agentops/session.py index 11260606b..676a6121e 100644 --- a/agentops/session.py +++ b/agentops/session.py @@ -337,8 +337,8 @@ def __init__( # Initialize logging components self._log_exporter = SessionLogExporter(session=self) - self._log_handler, self._log_processor = setup_session_telemetry(str(session_id), self._log_exporter) - logger.addHandler(self._log_handler) + self._log_processor = setup_session_telemetry(self, self._log_exporter) + # logger.addHandler(self._log_handler) def set_video(self, video: str) -> None: """ @@ -433,7 +433,7 @@ def end_session( except Exception as e: logger.exception(f"Error during session end: {e}") finally: - active_sessions.remove(self) # First thing, get rid of the session + remove_session(self) logger.info( colored( @@ -631,6 +631,7 @@ def _start_session(self): ) ) + add_session(self) return True except ApiServerException as e: return logger.error(f"Could not start session - {e}") diff --git a/pyproject.toml b/pyproject.toml index 85e91c5d3..9c86404d4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -118,6 +118,8 @@ pythonpath = ["."] faulthandler_timeout = 30 # Reduced from 60 timeout = 60 # Reduced from 300 disable_socket = true # Add this to prevent hanging on 
socket cleanup +log_cli = true # Enable logging to console +log_cli_level = "INFO" # Set log level to INFO [tool.ruff] line-length = 120 diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 90fda319b..63471aff1 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -17,9 +17,17 @@ @pytest.fixture -def agentops_session(): - agentops.start_session() - +def agentops_init(): + agentops.init(auto_start_session=False) yield + +@pytest.fixture +def agentops_session(agentops_init): + session = agentops.start_session() + + assert session, "Failed agentops.start_session() returned None." + + yield session + agentops.end_all_sessions() diff --git a/tests/integration/test_logcap.py b/tests/integration/test_logcap.py new file mode 100644 index 000000000..e64e87d60 --- /dev/null +++ b/tests/integration/test_logcap.py @@ -0,0 +1,37 @@ +from agentops.log_capture import LogCapture + + +def test_logcap(agentops_session): + import os + import sys + import time + from dataclasses import dataclass + from uuid import uuid4 + + session = agentops_session + capture = LogCapture(session=session) + capture.start() + try: + # Test Rich formatting + from rich.console import Console + + console = Console(force_terminal=True) + rprint = console.print + rprint("[red]This is red text[/red]") + rprint("[blue]Blue[/blue] and [green]green[/green] mixed") + rprint("[bold red]Bold red[/bold red] and [italic blue]italic blue[/italic blue]") + + # Test raw ANSI codes + print("\033[31mDirect red ANSI\033[0m\n") + print("\033[34mBlue\033[0m and \033[32mgreen\033[0m mixed ANSI\n") + print("\033[1;31mBold red ANSI\033[0m\n") + + # Test stderr with colors + sys.stderr.write("\033[35mMagenta error\033[0m\n") + sys.stderr.write("\033[33mYellow warning\033[0m\n") + + finally: + # Stop capture and show normal output is restored + capture.stop() + # print("\nCapture stopped - this prints normally to stdout") + # sys.stderr.write("This error goes 
normally to stderr\n") diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 2b6b17be1..189939760 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1,8 +1,8 @@ import contextlib +import re import uuid from collections import defaultdict from typing import Dict, Iterator, List -import re import pytest import requests_mock @@ -89,8 +89,8 @@ def reauthorize_jwt_response(request, context): @pytest.fixture -def agentops_init(): - agentops.init() +def agentops_init(api_key, base_url): + agentops.init(api_key=api_key, endpoint=base_url, auto_start_session=False) @pytest.fixture