diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0cb4cb3d6..5776eb63d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -73,6 +73,7 @@ jobs: - "3.11" - "3.12" - "3.13" + - "3.14" name: Test on Python version ${{ matrix.python-version }} steps: diff --git a/langfuse/_client/client.py b/langfuse/_client/client.py index 2b3d6671c..44ccd767b 100644 --- a/langfuse/_client/client.py +++ b/langfuse/_client/client.py @@ -2972,7 +2972,7 @@ def create_dataset( ) langfuse_logger.debug(f"Creating datasets {body}") - return self.api.datasets.create(request=body) + return cast(Dataset, self.api.datasets.create(request=body)) except Error as e: handle_fern_exception(e) @@ -3024,17 +3024,17 @@ def create_dataset_item( """ try: body = CreateDatasetItemRequest( - datasetName=dataset_name, + dataset_name=dataset_name, input=input, - expectedOutput=expected_output, + expected_output=expected_output, metadata=metadata, - sourceTraceId=source_trace_id, - sourceObservationId=source_observation_id, + source_trace_id=source_trace_id, + source_observation_id=source_observation_id, status=status, id=id, ) langfuse_logger.debug(f"Creating dataset item {body}") - return self.api.dataset_items.create(request=body) + return cast(DatasetItem, self.api.dataset_items.create(request=body)) except Error as e: handle_fern_exception(e) raise e @@ -3395,7 +3395,7 @@ def create_prompt( labels=labels, tags=tags, config=config or {}, - commitMessage=commit_message, + commit_message=commit_message, type="chat", ) ) @@ -3415,7 +3415,7 @@ def create_prompt( labels=labels, tags=tags, config=config or {}, - commitMessage=commit_message, + commit_message=commit_message, type="text", ) diff --git a/langfuse/_client/datasets.py b/langfuse/_client/datasets.py index beb1248ba..e428ea3fb 100644 --- a/langfuse/_client/datasets.py +++ b/langfuse/_client/datasets.py @@ -131,11 +131,11 @@ def run( self.langfuse.api.dataset_run_items.create( request=CreateDatasetRunItemRequest( 
- runName=run_name, - datasetItemId=self.id, - traceId=span.trace_id, + run_name=run_name, + dataset_item_id=self.id, + trace_id=span.trace_id, metadata=run_metadata, - runDescription=run_description, + run_description=run_description, ) ) diff --git a/langfuse/_client/observe.py b/langfuse/_client/observe.py index afd969201..e8786a0e0 100644 --- a/langfuse/_client/observe.py +++ b/langfuse/_client/observe.py @@ -589,7 +589,9 @@ def __next__(self) -> Any: raise # Re-raise StopIteration except Exception as e: - self.span.update(level="ERROR", status_message=str(e) or type(e).__name__).end() + self.span.update( + level="ERROR", status_message=str(e) or type(e).__name__ + ).end() raise @@ -654,6 +656,8 @@ async def __anext__(self) -> Any: raise # Re-raise StopAsyncIteration except Exception as e: - self.span.update(level="ERROR", status_message=str(e) or type(e).__name__).end() + self.span.update( + level="ERROR", status_message=str(e) or type(e).__name__ + ).end() raise diff --git a/langfuse/_task_manager/media_manager.py b/langfuse/_task_manager/media_manager.py index 1a32e3d60..833c5138c 100644 --- a/langfuse/_task_manager/media_manager.py +++ b/langfuse/_task_manager/media_manager.py @@ -220,12 +220,12 @@ def _process_upload_media_job( upload_url_response = self._request_with_backoff( self._api_client.media.get_upload_url, request=GetMediaUploadUrlRequest( - contentLength=data["content_length"], - contentType=cast(MediaContentType, data["content_type"]), - sha256Hash=data["content_sha256_hash"], + content_length=data["content_length"], + content_type=cast(MediaContentType, data["content_type"]), + sha_256_hash=data["content_sha256_hash"], field=data["field"], - traceId=data["trace_id"], - observationId=data["observation_id"], + trace_id=data["trace_id"], + observation_id=data["observation_id"], ), ) @@ -267,10 +267,10 @@ def _process_upload_media_job( self._api_client.media.patch, media_id=data["media_id"], request=PatchMediaBody( - uploadedAt=_get_timestamp(), - 
uploadHttpStatus=upload_response.status_code, - uploadHttpError=upload_response.text, - uploadTimeMs=upload_time_ms, + uploaded_at=_get_timestamp(), + upload_http_status=upload_response.status_code, + upload_http_error=upload_response.text, + upload_time_ms=upload_time_ms, ), ) diff --git a/langfuse/_task_manager/score_ingestion_consumer.py b/langfuse/_task_manager/score_ingestion_consumer.py index 1a5b61f91..8a495cf91 100644 --- a/langfuse/_task_manager/score_ingestion_consumer.py +++ b/langfuse/_task_manager/score_ingestion_consumer.py @@ -7,18 +7,14 @@ from typing import Any, List, Optional import backoff - -from ..version import __version__ as langfuse_version - -try: - import pydantic.v1 as pydantic -except ImportError: - import pydantic # type: ignore +import pydantic from langfuse._utils.parse_error import handle_exception from langfuse._utils.request import APIError, LangfuseClient from langfuse._utils.serializer import EventSerializer +from ..version import __version__ as langfuse_version + MAX_EVENT_SIZE_BYTES = int(os.environ.get("LANGFUSE_MAX_EVENT_SIZE_BYTES", 1_000_000)) MAX_BATCH_SIZE_BYTES = int(os.environ.get("LANGFUSE_MAX_BATCH_SIZE_BYTES", 2_500_000)) @@ -79,7 +75,7 @@ def _next(self) -> list: # convert pydantic models to dicts if "body" in event and isinstance(event["body"], pydantic.BaseModel): - event["body"] = event["body"].dict(exclude_none=True) + event["body"] = event["body"].model_dump(exclude_none=True) item_size = self._get_item_size(event) @@ -156,7 +152,7 @@ def _upload_batch(self, batch: List[Any]) -> None: sdk_name="python", sdk_version=langfuse_version, public_key=self._public_key, - ).dict() + ).model_dump() @backoff.on_exception( backoff.expo, Exception, max_tries=self._max_retries, logger=None diff --git a/langfuse/api/.fern/metadata.json b/langfuse/api/.fern/metadata.json new file mode 100644 index 000000000..7a0a1d648 --- /dev/null +++ b/langfuse/api/.fern/metadata.json @@ -0,0 +1,14 @@ +{ + "cliVersion": "0.116.0", + 
"generatorName": "fernapi/fern-python-sdk", + "generatorVersion": "4.36.0", + "generatorConfig": { + "client_class_name": "FernLangfuse", + "improved_imports": false, + "inline_request_params": false, + "pydantic_config": { + "require_optional_fields": false, + "use_str_enums": false + } + } +} \ No newline at end of file diff --git a/langfuse/api/README.md b/langfuse/api/README.md deleted file mode 100644 index 9e8fef6d4..000000000 --- a/langfuse/api/README.md +++ /dev/null @@ -1,149 +0,0 @@ -# Langfuse Python Library - -[![fern shield](https://img.shields.io/badge/%F0%9F%8C%BF-Built%20with%20Fern-brightgreen)](https://buildwithfern.com?utm_source=github&utm_medium=github&utm_campaign=readme&utm_source=Langfuse%2FPython) -[![pypi](https://img.shields.io/pypi/v/langfuse)](https://pypi.python.org/pypi/langfuse) - -The Langfuse Python library provides convenient access to the Langfuse APIs from Python. - -## Installation - -```sh -pip install langfuse -``` - -## Usage - -Instantiate and use the client with the following: - -```python -from langfuse import CreateAnnotationQueueRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.create_queue( - request=CreateAnnotationQueueRequest( - name="name", - score_config_ids=["scoreConfigIds", "scoreConfigIds"], - ), -) -``` - -## Async Client - -The SDK also exports an `async` client so that you can make non-blocking calls to our API. 
- -```python -import asyncio - -from langfuse import CreateAnnotationQueueRequest -from langfuse.client import AsyncFernLangfuse - -client = AsyncFernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) - - -async def main() -> None: - await client.annotation_queues.create_queue( - request=CreateAnnotationQueueRequest( - name="name", - score_config_ids=["scoreConfigIds", "scoreConfigIds"], - ), - ) - - -asyncio.run(main()) -``` - -## Exception Handling - -When the API returns a non-success status code (4xx or 5xx response), a subclass of the following error -will be thrown. - -```python -from .api_error import ApiError - -try: - client.annotation_queues.create_queue(...) -except ApiError as e: - print(e.status_code) - print(e.body) -``` - -## Advanced - -### Retries - -The SDK is instrumented with automatic retries with exponential backoff. A request will be retried as long -as the request is deemed retriable and the number of retry attempts has not grown larger than the configured -retry limit (default: 2). - -A request is deemed retriable when any of the following HTTP status codes is returned: - -- [408](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/408) (Timeout) -- [429](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429) (Too Many Requests) -- [5XX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500) (Internal Server Errors) - -Use the `max_retries` request option to configure this behavior. - -```python -client.annotation_queues.create_queue(...,{ - max_retries=1 -}) -``` - -### Timeouts - -The SDK defaults to a 60 second timeout. You can configure this with a timeout option at the client or request level. 
- -```python - -from langfuse.client import FernLangfuse - -client = FernLangfuse(..., { timeout=20.0 }, ) - - -# Override timeout for a specific method -client.annotation_queues.create_queue(...,{ - timeout_in_seconds=1 -}) -``` - -### Custom Client - -You can override the `httpx` client to customize it for your use-case. Some common use-cases include support for proxies -and transports. -```python -import httpx -from langfuse.client import FernLangfuse - -client = FernLangfuse( - ..., - http_client=httpx.Client( - proxies="http://my.test.proxy.example.com", - transport=httpx.HTTPTransport(local_address="0.0.0.0"), - ), -) -``` - -## Contributing - -While we value open-source contributions to this SDK, this library is generated programmatically. -Additions made directly to this library would have to be moved over to our generation code, -otherwise they would be overwritten upon the next generated release. Feel free to open a PR as -a proof of concept, but know that we will not be able to merge it as-is. We suggest opening -an issue first to discuss with us! - -On the other hand, contributions to the README are always very welcome! diff --git a/langfuse/api/__init__.py b/langfuse/api/__init__.py index 451ad991e..fd1413022 100644 --- a/langfuse/api/__init__.py +++ b/langfuse/api/__init__.py @@ -1,250 +1,522 @@ # This file was auto-generated by Fern from our API Definition. 
-from .resources import ( - AccessDeniedError, - AnnotationQueue, - AnnotationQueueAssignmentRequest, - AnnotationQueueItem, - AnnotationQueueObjectType, - AnnotationQueueStatus, - ApiKeyDeletionResponse, - ApiKeyList, - ApiKeyResponse, - ApiKeySummary, - AuthenticationScheme, - BaseEvent, - BasePrompt, - BaseScore, - BaseScoreV1, - BlobStorageExportFrequency, - BlobStorageExportMode, - BlobStorageIntegrationDeletionResponse, - BlobStorageIntegrationFileType, - BlobStorageIntegrationResponse, - BlobStorageIntegrationType, - BlobStorageIntegrationsResponse, - BooleanScore, - BooleanScoreV1, - BulkConfig, - CategoricalScore, - CategoricalScoreV1, - ChatMessage, - ChatMessageWithPlaceholders, - ChatMessageWithPlaceholders_Chatmessage, - ChatMessageWithPlaceholders_Placeholder, - ChatPrompt, - Comment, - CommentObjectType, - ConfigCategory, - CreateAnnotationQueueAssignmentResponse, - CreateAnnotationQueueItemRequest, - CreateAnnotationQueueRequest, - CreateBlobStorageIntegrationRequest, - CreateChatPromptRequest, - CreateCommentRequest, - CreateCommentResponse, - CreateDatasetItemRequest, - CreateDatasetRequest, - CreateDatasetRunItemRequest, - CreateEventBody, - CreateEventEvent, - CreateGenerationBody, - CreateGenerationEvent, - CreateModelRequest, - CreateObservationEvent, - CreatePromptRequest, - CreatePromptRequest_Chat, - CreatePromptRequest_Text, - CreateScoreConfigRequest, - CreateScoreRequest, - CreateScoreResponse, - CreateScoreValue, - CreateSpanBody, - CreateSpanEvent, - CreateTextPromptRequest, - Dataset, - DatasetItem, - DatasetRun, - DatasetRunItem, - DatasetRunWithItems, - DatasetStatus, - DeleteAnnotationQueueAssignmentResponse, - DeleteAnnotationQueueItemResponse, - DeleteDatasetItemResponse, - DeleteDatasetRunResponse, - DeleteMembershipRequest, - DeleteTraceResponse, - EmptyResponse, - Error, - FilterConfig, - GetCommentsResponse, - GetMediaResponse, - GetMediaUploadUrlRequest, - GetMediaUploadUrlResponse, - GetScoresResponse, - 
GetScoresResponseData, - GetScoresResponseDataBoolean, - GetScoresResponseDataCategorical, - GetScoresResponseDataNumeric, - GetScoresResponseData_Boolean, - GetScoresResponseData_Categorical, - GetScoresResponseData_Numeric, - GetScoresResponseTraceData, - HealthResponse, - IngestionError, - IngestionEvent, - IngestionEvent_EventCreate, - IngestionEvent_GenerationCreate, - IngestionEvent_GenerationUpdate, - IngestionEvent_ObservationCreate, - IngestionEvent_ObservationUpdate, - IngestionEvent_ScoreCreate, - IngestionEvent_SdkLog, - IngestionEvent_SpanCreate, - IngestionEvent_SpanUpdate, - IngestionEvent_TraceCreate, - IngestionResponse, - IngestionSuccess, - IngestionUsage, - LlmAdapter, - LlmConnection, - MapValue, - MediaContentType, - MembershipDeletionResponse, - MembershipRequest, - MembershipResponse, - MembershipRole, - MembershipsResponse, - MethodNotAllowedError, - MetricsResponse, - Model, - ModelPrice, - ModelUsageUnit, - NotFoundError, - NumericScore, - NumericScoreV1, - Observation, - ObservationBody, - ObservationLevel, - ObservationType, - Observations, - ObservationsView, - ObservationsViews, - OpenAiCompletionUsageSchema, - OpenAiResponseUsageSchema, - OpenAiUsage, - OptionalObservationBody, - OrganizationApiKey, - OrganizationApiKeysResponse, - OrganizationProject, - OrganizationProjectsResponse, - OtelAttribute, - OtelAttributeValue, - OtelResource, - OtelResourceSpan, - OtelScope, - OtelScopeSpan, - OtelSpan, - OtelTraceResponse, - PaginatedAnnotationQueueItems, - PaginatedAnnotationQueues, - PaginatedDatasetItems, - PaginatedDatasetRunItems, - PaginatedDatasetRuns, - PaginatedDatasets, - PaginatedLlmConnections, - PaginatedModels, - PaginatedSessions, - PatchMediaBody, - PlaceholderMessage, - Project, - ProjectDeletionResponse, - Projects, - Prompt, - PromptMeta, - PromptMetaListResponse, - PromptType, - Prompt_Chat, - Prompt_Text, - ResourceMeta, - ResourceType, - ResourceTypesResponse, - SchemaExtension, - SchemaResource, - SchemasResponse, 
- ScimEmail, - ScimFeatureSupport, - ScimName, - ScimUser, - ScimUsersListResponse, - Score, - ScoreBody, - ScoreConfig, - ScoreConfigs, - ScoreDataType, - ScoreEvent, - ScoreSource, - ScoreV1, - ScoreV1_Boolean, - ScoreV1_Categorical, - ScoreV1_Numeric, - Score_Boolean, - Score_Categorical, - Score_Numeric, - SdkLogBody, - SdkLogEvent, - ServiceProviderConfig, - ServiceUnavailableError, - Session, - SessionWithTraces, - Sort, - TextPrompt, - Trace, - TraceBody, - TraceEvent, - TraceWithDetails, - TraceWithFullDetails, - Traces, - UnauthorizedError, - UpdateAnnotationQueueItemRequest, - UpdateEventBody, - UpdateGenerationBody, - UpdateGenerationEvent, - UpdateObservationEvent, - UpdateScoreConfigRequest, - UpdateSpanBody, - UpdateSpanEvent, - UpsertLlmConnectionRequest, - Usage, - UsageDetails, - UserMeta, - annotation_queues, - blob_storage_integrations, - comments, - commons, - dataset_items, - dataset_run_items, - datasets, - health, - ingestion, - llm_connections, - media, - metrics, - models, - observations, - opentelemetry, - organizations, - projects, - prompt_version, - prompts, - scim, - score, - score_configs, - score_v_2, - sessions, - trace, - utils, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .resources import ( + AccessDeniedError, + AnnotationQueue, + AnnotationQueueAssignmentRequest, + AnnotationQueueItem, + AnnotationQueueObjectType, + AnnotationQueueStatus, + ApiKeyDeletionResponse, + ApiKeyList, + ApiKeyResponse, + ApiKeySummary, + AuthenticationScheme, + BaseEvent, + BasePrompt, + BaseScore, + BaseScoreV1, + BlobStorageExportFrequency, + BlobStorageExportMode, + BlobStorageIntegrationDeletionResponse, + BlobStorageIntegrationFileType, + BlobStorageIntegrationResponse, + BlobStorageIntegrationType, + BlobStorageIntegrationsResponse, + BooleanScore, + BooleanScoreV1, + BulkConfig, + CategoricalScore, + CategoricalScoreV1, + ChatMessage, + ChatMessageWithPlaceholders, + ChatPrompt, 
+ Comment, + CommentObjectType, + ConfigCategory, + CreateAnnotationQueueAssignmentResponse, + CreateAnnotationQueueItemRequest, + CreateAnnotationQueueRequest, + CreateBlobStorageIntegrationRequest, + CreateChatPromptRequest, + CreateCommentRequest, + CreateCommentResponse, + CreateDatasetItemRequest, + CreateDatasetRequest, + CreateDatasetRunItemRequest, + CreateEventBody, + CreateEventEvent, + CreateGenerationBody, + CreateGenerationEvent, + CreateModelRequest, + CreateObservationEvent, + CreatePromptRequest, + CreatePromptRequest_Chat, + CreatePromptRequest_Text, + CreateScoreConfigRequest, + CreateScoreRequest, + CreateScoreResponse, + CreateScoreValue, + CreateSpanBody, + CreateSpanEvent, + CreateTextPromptRequest, + Dataset, + DatasetItem, + DatasetRun, + DatasetRunItem, + DatasetRunWithItems, + DatasetStatus, + DeleteAnnotationQueueAssignmentResponse, + DeleteAnnotationQueueItemResponse, + DeleteDatasetItemResponse, + DeleteDatasetRunResponse, + DeleteMembershipRequest, + DeleteTraceResponse, + EmptyResponse, + Error, + FilterConfig, + GetCommentsResponse, + GetMediaResponse, + GetMediaUploadUrlRequest, + GetMediaUploadUrlResponse, + GetScoresResponse, + GetScoresResponseData, + GetScoresResponseDataBoolean, + GetScoresResponseDataCategorical, + GetScoresResponseDataNumeric, + GetScoresResponseData_Boolean, + GetScoresResponseData_Categorical, + GetScoresResponseData_Numeric, + GetScoresResponseTraceData, + HealthResponse, + IngestionError, + IngestionEvent, + IngestionEvent_EventCreate, + IngestionEvent_GenerationCreate, + IngestionEvent_GenerationUpdate, + IngestionEvent_ObservationCreate, + IngestionEvent_ObservationUpdate, + IngestionEvent_ScoreCreate, + IngestionEvent_SdkLog, + IngestionEvent_SpanCreate, + IngestionEvent_SpanUpdate, + IngestionEvent_TraceCreate, + IngestionResponse, + IngestionSuccess, + IngestionUsage, + LlmAdapter, + LlmConnection, + MapValue, + MediaContentType, + MembershipDeletionResponse, + MembershipRequest, + 
MembershipResponse, + MembershipRole, + MembershipsResponse, + MethodNotAllowedError, + MetricsResponse, + Model, + ModelPrice, + ModelUsageUnit, + NotFoundError, + NumericScore, + NumericScoreV1, + Observation, + ObservationBody, + ObservationLevel, + ObservationType, + Observations, + ObservationsView, + ObservationsViews, + OpenAiCompletionUsageSchema, + OpenAiResponseUsageSchema, + OpenAiUsage, + OptionalObservationBody, + OrganizationApiKey, + OrganizationApiKeysResponse, + OrganizationProject, + OrganizationProjectsResponse, + OtelAttribute, + OtelAttributeValue, + OtelResource, + OtelResourceSpan, + OtelScope, + OtelScopeSpan, + OtelSpan, + OtelTraceResponse, + PaginatedAnnotationQueueItems, + PaginatedAnnotationQueues, + PaginatedDatasetItems, + PaginatedDatasetRunItems, + PaginatedDatasetRuns, + PaginatedDatasets, + PaginatedLlmConnections, + PaginatedModels, + PaginatedSessions, + PatchMediaBody, + PlaceholderMessage, + Project, + ProjectDeletionResponse, + Projects, + Prompt, + PromptMeta, + PromptMetaListResponse, + PromptType, + Prompt_Chat, + Prompt_Text, + ResourceMeta, + ResourceType, + ResourceTypesResponse, + SchemaExtension, + SchemaResource, + SchemasResponse, + ScimEmail, + ScimFeatureSupport, + ScimName, + ScimUser, + ScimUsersListResponse, + Score, + ScoreBody, + ScoreConfig, + ScoreConfigs, + ScoreDataType, + ScoreEvent, + ScoreSource, + ScoreV1, + Score_Boolean, + Score_Categorical, + Score_Numeric, + SdkLogBody, + SdkLogEvent, + ServiceProviderConfig, + ServiceUnavailableError, + Session, + SessionWithTraces, + Sort, + TextPrompt, + Trace, + TraceBody, + TraceEvent, + TraceWithDetails, + TraceWithFullDetails, + Traces, + UnauthorizedError, + UpdateAnnotationQueueItemRequest, + UpdateEventBody, + UpdateGenerationBody, + UpdateGenerationEvent, + UpdateObservationEvent, + UpdateScoreConfigRequest, + UpdateSpanBody, + UpdateSpanEvent, + UpsertLlmConnectionRequest, + Usage, + UsageDetails, + UserMeta, + annotation_queues, + 
blob_storage_integrations, + comments, + commons, + dataset_items, + dataset_run_items, + datasets, + health, + ingestion, + llm_connections, + media, + metrics, + models, + observations, + opentelemetry, + organizations, + projects, + prompt_version, + prompts, + scim, + score, + score_configs, + score_v_2, + sessions, + trace, + utils, + ) + from .client import AsyncFernLangfuse, FernLangfuse +_dynamic_imports: typing.Dict[str, str] = { + "AccessDeniedError": ".resources", + "AnnotationQueue": ".resources", + "AnnotationQueueAssignmentRequest": ".resources", + "AnnotationQueueItem": ".resources", + "AnnotationQueueObjectType": ".resources", + "AnnotationQueueStatus": ".resources", + "ApiKeyDeletionResponse": ".resources", + "ApiKeyList": ".resources", + "ApiKeyResponse": ".resources", + "ApiKeySummary": ".resources", + "AsyncFernLangfuse": ".client", + "AuthenticationScheme": ".resources", + "BaseEvent": ".resources", + "BasePrompt": ".resources", + "BaseScore": ".resources", + "BaseScoreV1": ".resources", + "BlobStorageExportFrequency": ".resources", + "BlobStorageExportMode": ".resources", + "BlobStorageIntegrationDeletionResponse": ".resources", + "BlobStorageIntegrationFileType": ".resources", + "BlobStorageIntegrationResponse": ".resources", + "BlobStorageIntegrationType": ".resources", + "BlobStorageIntegrationsResponse": ".resources", + "BooleanScore": ".resources", + "BooleanScoreV1": ".resources", + "BulkConfig": ".resources", + "CategoricalScore": ".resources", + "CategoricalScoreV1": ".resources", + "ChatMessage": ".resources", + "ChatMessageWithPlaceholders": ".resources", + "ChatPrompt": ".resources", + "Comment": ".resources", + "CommentObjectType": ".resources", + "ConfigCategory": ".resources", + "CreateAnnotationQueueAssignmentResponse": ".resources", + "CreateAnnotationQueueItemRequest": ".resources", + "CreateAnnotationQueueRequest": ".resources", + "CreateBlobStorageIntegrationRequest": ".resources", + "CreateChatPromptRequest": ".resources", 
+ "CreateCommentRequest": ".resources", + "CreateCommentResponse": ".resources", + "CreateDatasetItemRequest": ".resources", + "CreateDatasetRequest": ".resources", + "CreateDatasetRunItemRequest": ".resources", + "CreateEventBody": ".resources", + "CreateEventEvent": ".resources", + "CreateGenerationBody": ".resources", + "CreateGenerationEvent": ".resources", + "CreateModelRequest": ".resources", + "CreateObservationEvent": ".resources", + "CreatePromptRequest": ".resources", + "CreatePromptRequest_Chat": ".resources", + "CreatePromptRequest_Text": ".resources", + "CreateScoreConfigRequest": ".resources", + "CreateScoreRequest": ".resources", + "CreateScoreResponse": ".resources", + "CreateScoreValue": ".resources", + "CreateSpanBody": ".resources", + "CreateSpanEvent": ".resources", + "CreateTextPromptRequest": ".resources", + "Dataset": ".resources", + "DatasetItem": ".resources", + "DatasetRun": ".resources", + "DatasetRunItem": ".resources", + "DatasetRunWithItems": ".resources", + "DatasetStatus": ".resources", + "DeleteAnnotationQueueAssignmentResponse": ".resources", + "DeleteAnnotationQueueItemResponse": ".resources", + "DeleteDatasetItemResponse": ".resources", + "DeleteDatasetRunResponse": ".resources", + "DeleteMembershipRequest": ".resources", + "DeleteTraceResponse": ".resources", + "EmptyResponse": ".resources", + "Error": ".resources", + "FernLangfuse": ".client", + "FilterConfig": ".resources", + "GetCommentsResponse": ".resources", + "GetMediaResponse": ".resources", + "GetMediaUploadUrlRequest": ".resources", + "GetMediaUploadUrlResponse": ".resources", + "GetScoresResponse": ".resources", + "GetScoresResponseData": ".resources", + "GetScoresResponseDataBoolean": ".resources", + "GetScoresResponseDataCategorical": ".resources", + "GetScoresResponseDataNumeric": ".resources", + "GetScoresResponseData_Boolean": ".resources", + "GetScoresResponseData_Categorical": ".resources", + "GetScoresResponseData_Numeric": ".resources", + 
"GetScoresResponseTraceData": ".resources", + "HealthResponse": ".resources", + "IngestionError": ".resources", + "IngestionEvent": ".resources", + "IngestionEvent_EventCreate": ".resources", + "IngestionEvent_GenerationCreate": ".resources", + "IngestionEvent_GenerationUpdate": ".resources", + "IngestionEvent_ObservationCreate": ".resources", + "IngestionEvent_ObservationUpdate": ".resources", + "IngestionEvent_ScoreCreate": ".resources", + "IngestionEvent_SdkLog": ".resources", + "IngestionEvent_SpanCreate": ".resources", + "IngestionEvent_SpanUpdate": ".resources", + "IngestionEvent_TraceCreate": ".resources", + "IngestionResponse": ".resources", + "IngestionSuccess": ".resources", + "IngestionUsage": ".resources", + "LlmAdapter": ".resources", + "LlmConnection": ".resources", + "MapValue": ".resources", + "MediaContentType": ".resources", + "MembershipDeletionResponse": ".resources", + "MembershipRequest": ".resources", + "MembershipResponse": ".resources", + "MembershipRole": ".resources", + "MembershipsResponse": ".resources", + "MethodNotAllowedError": ".resources", + "MetricsResponse": ".resources", + "Model": ".resources", + "ModelPrice": ".resources", + "ModelUsageUnit": ".resources", + "NotFoundError": ".resources", + "NumericScore": ".resources", + "NumericScoreV1": ".resources", + "Observation": ".resources", + "ObservationBody": ".resources", + "ObservationLevel": ".resources", + "ObservationType": ".resources", + "Observations": ".resources", + "ObservationsView": ".resources", + "ObservationsViews": ".resources", + "OpenAiCompletionUsageSchema": ".resources", + "OpenAiResponseUsageSchema": ".resources", + "OpenAiUsage": ".resources", + "OptionalObservationBody": ".resources", + "OrganizationApiKey": ".resources", + "OrganizationApiKeysResponse": ".resources", + "OrganizationProject": ".resources", + "OrganizationProjectsResponse": ".resources", + "OtelAttribute": ".resources", + "OtelAttributeValue": ".resources", + "OtelResource": ".resources", + 
"OtelResourceSpan": ".resources", + "OtelScope": ".resources", + "OtelScopeSpan": ".resources", + "OtelSpan": ".resources", + "OtelTraceResponse": ".resources", + "PaginatedAnnotationQueueItems": ".resources", + "PaginatedAnnotationQueues": ".resources", + "PaginatedDatasetItems": ".resources", + "PaginatedDatasetRunItems": ".resources", + "PaginatedDatasetRuns": ".resources", + "PaginatedDatasets": ".resources", + "PaginatedLlmConnections": ".resources", + "PaginatedModels": ".resources", + "PaginatedSessions": ".resources", + "PatchMediaBody": ".resources", + "PlaceholderMessage": ".resources", + "Project": ".resources", + "ProjectDeletionResponse": ".resources", + "Projects": ".resources", + "Prompt": ".resources", + "PromptMeta": ".resources", + "PromptMetaListResponse": ".resources", + "PromptType": ".resources", + "Prompt_Chat": ".resources", + "Prompt_Text": ".resources", + "ResourceMeta": ".resources", + "ResourceType": ".resources", + "ResourceTypesResponse": ".resources", + "SchemaExtension": ".resources", + "SchemaResource": ".resources", + "SchemasResponse": ".resources", + "ScimEmail": ".resources", + "ScimFeatureSupport": ".resources", + "ScimName": ".resources", + "ScimUser": ".resources", + "ScimUsersListResponse": ".resources", + "Score": ".resources", + "ScoreBody": ".resources", + "ScoreConfig": ".resources", + "ScoreConfigs": ".resources", + "ScoreDataType": ".resources", + "ScoreEvent": ".resources", + "ScoreSource": ".resources", + "ScoreV1": ".resources", + "Score_Boolean": ".resources", + "Score_Categorical": ".resources", + "Score_Numeric": ".resources", + "SdkLogBody": ".resources", + "SdkLogEvent": ".resources", + "ServiceProviderConfig": ".resources", + "ServiceUnavailableError": ".resources", + "Session": ".resources", + "SessionWithTraces": ".resources", + "Sort": ".resources", + "TextPrompt": ".resources", + "Trace": ".resources", + "TraceBody": ".resources", + "TraceEvent": ".resources", + "TraceWithDetails": ".resources", + 
"TraceWithFullDetails": ".resources", + "Traces": ".resources", + "UnauthorizedError": ".resources", + "UpdateAnnotationQueueItemRequest": ".resources", + "UpdateEventBody": ".resources", + "UpdateGenerationBody": ".resources", + "UpdateGenerationEvent": ".resources", + "UpdateObservationEvent": ".resources", + "UpdateScoreConfigRequest": ".resources", + "UpdateSpanBody": ".resources", + "UpdateSpanEvent": ".resources", + "UpsertLlmConnectionRequest": ".resources", + "Usage": ".resources", + "UsageDetails": ".resources", + "UserMeta": ".resources", + "annotation_queues": ".resources", + "blob_storage_integrations": ".resources", + "comments": ".resources", + "commons": ".resources", + "dataset_items": ".resources", + "dataset_run_items": ".resources", + "datasets": ".resources", + "health": ".resources", + "ingestion": ".resources", + "llm_connections": ".resources", + "media": ".resources", + "metrics": ".resources", + "models": ".resources", + "observations": ".resources", + "opentelemetry": ".resources", + "organizations": ".resources", + "projects": ".resources", + "prompt_version": ".resources", + "prompts": ".resources", + "scim": ".resources", + "score": ".resources", + "score_configs": ".resources", + "score_v_2": ".resources", + "sessions": ".resources", + "trace": ".resources", + "utils": ".resources", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = 
list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "AccessDeniedError", @@ -257,6 +529,7 @@ "ApiKeyList", "ApiKeyResponse", "ApiKeySummary", + "AsyncFernLangfuse", "AuthenticationScheme", "BaseEvent", "BasePrompt", @@ -276,8 +549,6 @@ "CategoricalScoreV1", "ChatMessage", "ChatMessageWithPlaceholders", - "ChatMessageWithPlaceholders_Chatmessage", - "ChatMessageWithPlaceholders_Placeholder", "ChatPrompt", "Comment", "CommentObjectType", @@ -322,6 +593,7 @@ "DeleteTraceResponse", "EmptyResponse", "Error", + "FernLangfuse", "FilterConfig", "GetCommentsResponse", "GetMediaResponse", @@ -431,9 +703,6 @@ "ScoreEvent", "ScoreSource", "ScoreV1", - "ScoreV1_Boolean", - "ScoreV1_Categorical", - "ScoreV1_Numeric", "Score_Boolean", "Score_Categorical", "Score_Numeric", diff --git a/langfuse/api/client.py b/langfuse/api/client.py index 646279b5a..ad37a9dc7 100644 --- a/langfuse/api/client.py +++ b/langfuse/api/client.py @@ -1,55 +1,67 @@ # This file was auto-generated by Fern from our API Definition. 
+from __future__ import annotations + import typing import httpx - from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .resources.annotation_queues.client import ( - AnnotationQueuesClient, - AsyncAnnotationQueuesClient, -) -from .resources.blob_storage_integrations.client import ( - AsyncBlobStorageIntegrationsClient, - BlobStorageIntegrationsClient, -) -from .resources.comments.client import AsyncCommentsClient, CommentsClient -from .resources.dataset_items.client import AsyncDatasetItemsClient, DatasetItemsClient -from .resources.dataset_run_items.client import ( - AsyncDatasetRunItemsClient, - DatasetRunItemsClient, -) -from .resources.datasets.client import AsyncDatasetsClient, DatasetsClient -from .resources.health.client import AsyncHealthClient, HealthClient -from .resources.ingestion.client import AsyncIngestionClient, IngestionClient -from .resources.llm_connections.client import ( - AsyncLlmConnectionsClient, - LlmConnectionsClient, -) -from .resources.media.client import AsyncMediaClient, MediaClient -from .resources.metrics.client import AsyncMetricsClient, MetricsClient -from .resources.models.client import AsyncModelsClient, ModelsClient -from .resources.observations.client import AsyncObservationsClient, ObservationsClient -from .resources.opentelemetry.client import ( - AsyncOpentelemetryClient, - OpentelemetryClient, -) -from .resources.organizations.client import ( - AsyncOrganizationsClient, - OrganizationsClient, -) -from .resources.projects.client import AsyncProjectsClient, ProjectsClient -from .resources.prompt_version.client import ( - AsyncPromptVersionClient, - PromptVersionClient, -) -from .resources.prompts.client import AsyncPromptsClient, PromptsClient -from .resources.scim.client import AsyncScimClient, ScimClient -from .resources.score.client import AsyncScoreClient, ScoreClient -from .resources.score_configs.client import AsyncScoreConfigsClient, ScoreConfigsClient -from .resources.score_v_2.client import 
AsyncScoreV2Client, ScoreV2Client -from .resources.sessions.client import AsyncSessionsClient, SessionsClient -from .resources.trace.client import AsyncTraceClient, TraceClient + +if typing.TYPE_CHECKING: + from .resources.annotation_queues.client import ( + AnnotationQueuesClient, + AsyncAnnotationQueuesClient, + ) + from .resources.blob_storage_integrations.client import ( + AsyncBlobStorageIntegrationsClient, + BlobStorageIntegrationsClient, + ) + from .resources.comments.client import AsyncCommentsClient, CommentsClient + from .resources.dataset_items.client import ( + AsyncDatasetItemsClient, + DatasetItemsClient, + ) + from .resources.dataset_run_items.client import ( + AsyncDatasetRunItemsClient, + DatasetRunItemsClient, + ) + from .resources.datasets.client import AsyncDatasetsClient, DatasetsClient + from .resources.health.client import AsyncHealthClient, HealthClient + from .resources.ingestion.client import AsyncIngestionClient, IngestionClient + from .resources.llm_connections.client import ( + AsyncLlmConnectionsClient, + LlmConnectionsClient, + ) + from .resources.media.client import AsyncMediaClient, MediaClient + from .resources.metrics.client import AsyncMetricsClient, MetricsClient + from .resources.models.client import AsyncModelsClient, ModelsClient + from .resources.observations.client import ( + AsyncObservationsClient, + ObservationsClient, + ) + from .resources.opentelemetry.client import ( + AsyncOpentelemetryClient, + OpentelemetryClient, + ) + from .resources.organizations.client import ( + AsyncOrganizationsClient, + OrganizationsClient, + ) + from .resources.projects.client import AsyncProjectsClient, ProjectsClient + from .resources.prompt_version.client import ( + AsyncPromptVersionClient, + PromptVersionClient, + ) + from .resources.prompts.client import AsyncPromptsClient, PromptsClient + from .resources.scim.client import AsyncScimClient, ScimClient + from .resources.score.client import AsyncScoreClient, ScoreClient + from 
.resources.score_configs.client import ( + AsyncScoreConfigsClient, + ScoreConfigsClient, + ) + from .resources.score_v_2.client import AsyncScoreV2Client, ScoreV2Client + from .resources.sessions.client import AsyncSessionsClient, SessionsClient + from .resources.trace.client import AsyncTraceClient, TraceClient class FernLangfuse: @@ -66,6 +78,9 @@ class FernLangfuse: x_langfuse_public_key : typing.Optional[str] username : typing.Optional[typing.Union[str, typing.Callable[[], str]]] password : typing.Optional[typing.Union[str, typing.Callable[[], str]]] + headers : typing.Optional[typing.Dict[str, str]] + Additional headers to send with every request. + timeout : typing.Optional[float] The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced. @@ -77,7 +92,7 @@ class FernLangfuse: Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -98,12 +113,17 @@ def __init__( x_langfuse_public_key: typing.Optional[str] = None, username: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, password: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, + headers: typing.Optional[typing.Dict[str, str]] = None, timeout: typing.Optional[float] = None, follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.Client] = None, ): _defaulted_timeout = ( - timeout if timeout is not None else 60 if httpx_client is None else None + timeout + if timeout is not None + else 60 + if httpx_client is None + else httpx_client.timeout.read ) self._client_wrapper = SyncClientWrapper( base_url=base_url, @@ -112,6 +132,7 @@ def __init__( x_langfuse_public_key=x_langfuse_public_key, username=username, password=password, + headers=headers, httpx_client=httpx_client if httpx_client is not None else httpx.Client( @@ 
-121,36 +142,244 @@ def __init__( else httpx.Client(timeout=_defaulted_timeout), timeout=_defaulted_timeout, ) - self.annotation_queues = AnnotationQueuesClient( - client_wrapper=self._client_wrapper - ) - self.blob_storage_integrations = BlobStorageIntegrationsClient( - client_wrapper=self._client_wrapper - ) - self.comments = CommentsClient(client_wrapper=self._client_wrapper) - self.dataset_items = DatasetItemsClient(client_wrapper=self._client_wrapper) - self.dataset_run_items = DatasetRunItemsClient( - client_wrapper=self._client_wrapper - ) - self.datasets = DatasetsClient(client_wrapper=self._client_wrapper) - self.health = HealthClient(client_wrapper=self._client_wrapper) - self.ingestion = IngestionClient(client_wrapper=self._client_wrapper) - self.llm_connections = LlmConnectionsClient(client_wrapper=self._client_wrapper) - self.media = MediaClient(client_wrapper=self._client_wrapper) - self.metrics = MetricsClient(client_wrapper=self._client_wrapper) - self.models = ModelsClient(client_wrapper=self._client_wrapper) - self.observations = ObservationsClient(client_wrapper=self._client_wrapper) - self.opentelemetry = OpentelemetryClient(client_wrapper=self._client_wrapper) - self.organizations = OrganizationsClient(client_wrapper=self._client_wrapper) - self.projects = ProjectsClient(client_wrapper=self._client_wrapper) - self.prompt_version = PromptVersionClient(client_wrapper=self._client_wrapper) - self.prompts = PromptsClient(client_wrapper=self._client_wrapper) - self.scim = ScimClient(client_wrapper=self._client_wrapper) - self.score_configs = ScoreConfigsClient(client_wrapper=self._client_wrapper) - self.score_v_2 = ScoreV2Client(client_wrapper=self._client_wrapper) - self.score = ScoreClient(client_wrapper=self._client_wrapper) - self.sessions = SessionsClient(client_wrapper=self._client_wrapper) - self.trace = TraceClient(client_wrapper=self._client_wrapper) + self._annotation_queues: typing.Optional[AnnotationQueuesClient] = None + 
self._blob_storage_integrations: typing.Optional[ + BlobStorageIntegrationsClient + ] = None + self._comments: typing.Optional[CommentsClient] = None + self._dataset_items: typing.Optional[DatasetItemsClient] = None + self._dataset_run_items: typing.Optional[DatasetRunItemsClient] = None + self._datasets: typing.Optional[DatasetsClient] = None + self._health: typing.Optional[HealthClient] = None + self._ingestion: typing.Optional[IngestionClient] = None + self._llm_connections: typing.Optional[LlmConnectionsClient] = None + self._media: typing.Optional[MediaClient] = None + self._metrics: typing.Optional[MetricsClient] = None + self._models: typing.Optional[ModelsClient] = None + self._observations: typing.Optional[ObservationsClient] = None + self._opentelemetry: typing.Optional[OpentelemetryClient] = None + self._organizations: typing.Optional[OrganizationsClient] = None + self._projects: typing.Optional[ProjectsClient] = None + self._prompt_version: typing.Optional[PromptVersionClient] = None + self._prompts: typing.Optional[PromptsClient] = None + self._scim: typing.Optional[ScimClient] = None + self._score_configs: typing.Optional[ScoreConfigsClient] = None + self._score_v_2: typing.Optional[ScoreV2Client] = None + self._score: typing.Optional[ScoreClient] = None + self._sessions: typing.Optional[SessionsClient] = None + self._trace: typing.Optional[TraceClient] = None + + @property + def annotation_queues(self): + if self._annotation_queues is None: + from .resources.annotation_queues.client import AnnotationQueuesClient # noqa: E402 + + self._annotation_queues = AnnotationQueuesClient( + client_wrapper=self._client_wrapper + ) + return self._annotation_queues + + @property + def blob_storage_integrations(self): + if self._blob_storage_integrations is None: + from .resources.blob_storage_integrations.client import ( + BlobStorageIntegrationsClient, + ) # noqa: E402 + + self._blob_storage_integrations = BlobStorageIntegrationsClient( + 
client_wrapper=self._client_wrapper + ) + return self._blob_storage_integrations + + @property + def comments(self): + if self._comments is None: + from .resources.comments.client import CommentsClient # noqa: E402 + + self._comments = CommentsClient(client_wrapper=self._client_wrapper) + return self._comments + + @property + def dataset_items(self): + if self._dataset_items is None: + from .resources.dataset_items.client import DatasetItemsClient # noqa: E402 + + self._dataset_items = DatasetItemsClient( + client_wrapper=self._client_wrapper + ) + return self._dataset_items + + @property + def dataset_run_items(self): + if self._dataset_run_items is None: + from .resources.dataset_run_items.client import DatasetRunItemsClient # noqa: E402 + + self._dataset_run_items = DatasetRunItemsClient( + client_wrapper=self._client_wrapper + ) + return self._dataset_run_items + + @property + def datasets(self): + if self._datasets is None: + from .resources.datasets.client import DatasetsClient # noqa: E402 + + self._datasets = DatasetsClient(client_wrapper=self._client_wrapper) + return self._datasets + + @property + def health(self): + if self._health is None: + from .resources.health.client import HealthClient # noqa: E402 + + self._health = HealthClient(client_wrapper=self._client_wrapper) + return self._health + + @property + def ingestion(self): + if self._ingestion is None: + from .resources.ingestion.client import IngestionClient # noqa: E402 + + self._ingestion = IngestionClient(client_wrapper=self._client_wrapper) + return self._ingestion + + @property + def llm_connections(self): + if self._llm_connections is None: + from .resources.llm_connections.client import LlmConnectionsClient # noqa: E402 + + self._llm_connections = LlmConnectionsClient( + client_wrapper=self._client_wrapper + ) + return self._llm_connections + + @property + def media(self): + if self._media is None: + from .resources.media.client import MediaClient # noqa: E402 + + self._media = 
MediaClient(client_wrapper=self._client_wrapper) + return self._media + + @property + def metrics(self): + if self._metrics is None: + from .resources.metrics.client import MetricsClient # noqa: E402 + + self._metrics = MetricsClient(client_wrapper=self._client_wrapper) + return self._metrics + + @property + def models(self): + if self._models is None: + from .resources.models.client import ModelsClient # noqa: E402 + + self._models = ModelsClient(client_wrapper=self._client_wrapper) + return self._models + + @property + def observations(self): + if self._observations is None: + from .resources.observations.client import ObservationsClient # noqa: E402 + + self._observations = ObservationsClient(client_wrapper=self._client_wrapper) + return self._observations + + @property + def opentelemetry(self): + if self._opentelemetry is None: + from .resources.opentelemetry.client import OpentelemetryClient # noqa: E402 + + self._opentelemetry = OpentelemetryClient( + client_wrapper=self._client_wrapper + ) + return self._opentelemetry + + @property + def organizations(self): + if self._organizations is None: + from .resources.organizations.client import OrganizationsClient # noqa: E402 + + self._organizations = OrganizationsClient( + client_wrapper=self._client_wrapper + ) + return self._organizations + + @property + def projects(self): + if self._projects is None: + from .resources.projects.client import ProjectsClient # noqa: E402 + + self._projects = ProjectsClient(client_wrapper=self._client_wrapper) + return self._projects + + @property + def prompt_version(self): + if self._prompt_version is None: + from .resources.prompt_version.client import PromptVersionClient # noqa: E402 + + self._prompt_version = PromptVersionClient( + client_wrapper=self._client_wrapper + ) + return self._prompt_version + + @property + def prompts(self): + if self._prompts is None: + from .resources.prompts.client import PromptsClient # noqa: E402 + + self._prompts = 
PromptsClient(client_wrapper=self._client_wrapper) + return self._prompts + + @property + def scim(self): + if self._scim is None: + from .resources.scim.client import ScimClient # noqa: E402 + + self._scim = ScimClient(client_wrapper=self._client_wrapper) + return self._scim + + @property + def score_configs(self): + if self._score_configs is None: + from .resources.score_configs.client import ScoreConfigsClient # noqa: E402 + + self._score_configs = ScoreConfigsClient( + client_wrapper=self._client_wrapper + ) + return self._score_configs + + @property + def score_v_2(self): + if self._score_v_2 is None: + from .resources.score_v_2.client import ScoreV2Client # noqa: E402 + + self._score_v_2 = ScoreV2Client(client_wrapper=self._client_wrapper) + return self._score_v_2 + + @property + def score(self): + if self._score is None: + from .resources.score.client import ScoreClient # noqa: E402 + + self._score = ScoreClient(client_wrapper=self._client_wrapper) + return self._score + + @property + def sessions(self): + if self._sessions is None: + from .resources.sessions.client import SessionsClient # noqa: E402 + + self._sessions = SessionsClient(client_wrapper=self._client_wrapper) + return self._sessions + + @property + def trace(self): + if self._trace is None: + from .resources.trace.client import TraceClient # noqa: E402 + + self._trace = TraceClient(client_wrapper=self._client_wrapper) + return self._trace class AsyncFernLangfuse: @@ -167,6 +396,9 @@ class AsyncFernLangfuse: x_langfuse_public_key : typing.Optional[str] username : typing.Optional[typing.Union[str, typing.Callable[[], str]]] password : typing.Optional[typing.Union[str, typing.Callable[[], str]]] + headers : typing.Optional[typing.Dict[str, str]] + Additional headers to send with every request. + timeout : typing.Optional[float] The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced. 
@@ -178,7 +410,7 @@ class AsyncFernLangfuse: Examples -------- - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -199,12 +431,17 @@ def __init__( x_langfuse_public_key: typing.Optional[str] = None, username: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, password: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, + headers: typing.Optional[typing.Dict[str, str]] = None, timeout: typing.Optional[float] = None, follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.AsyncClient] = None, ): _defaulted_timeout = ( - timeout if timeout is not None else 60 if httpx_client is None else None + timeout + if timeout is not None + else 60 + if httpx_client is None + else httpx_client.timeout.read ) self._client_wrapper = AsyncClientWrapper( base_url=base_url, @@ -213,6 +450,7 @@ def __init__( x_langfuse_public_key=x_langfuse_public_key, username=username, password=password, + headers=headers, httpx_client=httpx_client if httpx_client is not None else httpx.AsyncClient( @@ -222,45 +460,243 @@ def __init__( else httpx.AsyncClient(timeout=_defaulted_timeout), timeout=_defaulted_timeout, ) - self.annotation_queues = AsyncAnnotationQueuesClient( - client_wrapper=self._client_wrapper - ) - self.blob_storage_integrations = AsyncBlobStorageIntegrationsClient( - client_wrapper=self._client_wrapper - ) - self.comments = AsyncCommentsClient(client_wrapper=self._client_wrapper) - self.dataset_items = AsyncDatasetItemsClient( - client_wrapper=self._client_wrapper - ) - self.dataset_run_items = AsyncDatasetRunItemsClient( - client_wrapper=self._client_wrapper - ) - self.datasets = AsyncDatasetsClient(client_wrapper=self._client_wrapper) - self.health = AsyncHealthClient(client_wrapper=self._client_wrapper) - self.ingestion = AsyncIngestionClient(client_wrapper=self._client_wrapper) - self.llm_connections = 
AsyncLlmConnectionsClient( - client_wrapper=self._client_wrapper - ) - self.media = AsyncMediaClient(client_wrapper=self._client_wrapper) - self.metrics = AsyncMetricsClient(client_wrapper=self._client_wrapper) - self.models = AsyncModelsClient(client_wrapper=self._client_wrapper) - self.observations = AsyncObservationsClient(client_wrapper=self._client_wrapper) - self.opentelemetry = AsyncOpentelemetryClient( - client_wrapper=self._client_wrapper - ) - self.organizations = AsyncOrganizationsClient( - client_wrapper=self._client_wrapper - ) - self.projects = AsyncProjectsClient(client_wrapper=self._client_wrapper) - self.prompt_version = AsyncPromptVersionClient( - client_wrapper=self._client_wrapper - ) - self.prompts = AsyncPromptsClient(client_wrapper=self._client_wrapper) - self.scim = AsyncScimClient(client_wrapper=self._client_wrapper) - self.score_configs = AsyncScoreConfigsClient( - client_wrapper=self._client_wrapper - ) - self.score_v_2 = AsyncScoreV2Client(client_wrapper=self._client_wrapper) - self.score = AsyncScoreClient(client_wrapper=self._client_wrapper) - self.sessions = AsyncSessionsClient(client_wrapper=self._client_wrapper) - self.trace = AsyncTraceClient(client_wrapper=self._client_wrapper) + self._annotation_queues: typing.Optional[AsyncAnnotationQueuesClient] = None + self._blob_storage_integrations: typing.Optional[ + AsyncBlobStorageIntegrationsClient + ] = None + self._comments: typing.Optional[AsyncCommentsClient] = None + self._dataset_items: typing.Optional[AsyncDatasetItemsClient] = None + self._dataset_run_items: typing.Optional[AsyncDatasetRunItemsClient] = None + self._datasets: typing.Optional[AsyncDatasetsClient] = None + self._health: typing.Optional[AsyncHealthClient] = None + self._ingestion: typing.Optional[AsyncIngestionClient] = None + self._llm_connections: typing.Optional[AsyncLlmConnectionsClient] = None + self._media: typing.Optional[AsyncMediaClient] = None + self._metrics: typing.Optional[AsyncMetricsClient] = None + 
self._models: typing.Optional[AsyncModelsClient] = None + self._observations: typing.Optional[AsyncObservationsClient] = None + self._opentelemetry: typing.Optional[AsyncOpentelemetryClient] = None + self._organizations: typing.Optional[AsyncOrganizationsClient] = None + self._projects: typing.Optional[AsyncProjectsClient] = None + self._prompt_version: typing.Optional[AsyncPromptVersionClient] = None + self._prompts: typing.Optional[AsyncPromptsClient] = None + self._scim: typing.Optional[AsyncScimClient] = None + self._score_configs: typing.Optional[AsyncScoreConfigsClient] = None + self._score_v_2: typing.Optional[AsyncScoreV2Client] = None + self._score: typing.Optional[AsyncScoreClient] = None + self._sessions: typing.Optional[AsyncSessionsClient] = None + self._trace: typing.Optional[AsyncTraceClient] = None + + @property + def annotation_queues(self): + if self._annotation_queues is None: + from .resources.annotation_queues.client import AsyncAnnotationQueuesClient # noqa: E402 + + self._annotation_queues = AsyncAnnotationQueuesClient( + client_wrapper=self._client_wrapper + ) + return self._annotation_queues + + @property + def blob_storage_integrations(self): + if self._blob_storage_integrations is None: + from .resources.blob_storage_integrations.client import ( + AsyncBlobStorageIntegrationsClient, + ) # noqa: E402 + + self._blob_storage_integrations = AsyncBlobStorageIntegrationsClient( + client_wrapper=self._client_wrapper + ) + return self._blob_storage_integrations + + @property + def comments(self): + if self._comments is None: + from .resources.comments.client import AsyncCommentsClient # noqa: E402 + + self._comments = AsyncCommentsClient(client_wrapper=self._client_wrapper) + return self._comments + + @property + def dataset_items(self): + if self._dataset_items is None: + from .resources.dataset_items.client import AsyncDatasetItemsClient # noqa: E402 + + self._dataset_items = AsyncDatasetItemsClient( + client_wrapper=self._client_wrapper + ) + 
return self._dataset_items + + @property + def dataset_run_items(self): + if self._dataset_run_items is None: + from .resources.dataset_run_items.client import AsyncDatasetRunItemsClient # noqa: E402 + + self._dataset_run_items = AsyncDatasetRunItemsClient( + client_wrapper=self._client_wrapper + ) + return self._dataset_run_items + + @property + def datasets(self): + if self._datasets is None: + from .resources.datasets.client import AsyncDatasetsClient # noqa: E402 + + self._datasets = AsyncDatasetsClient(client_wrapper=self._client_wrapper) + return self._datasets + + @property + def health(self): + if self._health is None: + from .resources.health.client import AsyncHealthClient # noqa: E402 + + self._health = AsyncHealthClient(client_wrapper=self._client_wrapper) + return self._health + + @property + def ingestion(self): + if self._ingestion is None: + from .resources.ingestion.client import AsyncIngestionClient # noqa: E402 + + self._ingestion = AsyncIngestionClient(client_wrapper=self._client_wrapper) + return self._ingestion + + @property + def llm_connections(self): + if self._llm_connections is None: + from .resources.llm_connections.client import AsyncLlmConnectionsClient # noqa: E402 + + self._llm_connections = AsyncLlmConnectionsClient( + client_wrapper=self._client_wrapper + ) + return self._llm_connections + + @property + def media(self): + if self._media is None: + from .resources.media.client import AsyncMediaClient # noqa: E402 + + self._media = AsyncMediaClient(client_wrapper=self._client_wrapper) + return self._media + + @property + def metrics(self): + if self._metrics is None: + from .resources.metrics.client import AsyncMetricsClient # noqa: E402 + + self._metrics = AsyncMetricsClient(client_wrapper=self._client_wrapper) + return self._metrics + + @property + def models(self): + if self._models is None: + from .resources.models.client import AsyncModelsClient # noqa: E402 + + self._models = 
AsyncModelsClient(client_wrapper=self._client_wrapper) + return self._models + + @property + def observations(self): + if self._observations is None: + from .resources.observations.client import AsyncObservationsClient # noqa: E402 + + self._observations = AsyncObservationsClient( + client_wrapper=self._client_wrapper + ) + return self._observations + + @property + def opentelemetry(self): + if self._opentelemetry is None: + from .resources.opentelemetry.client import AsyncOpentelemetryClient # noqa: E402 + + self._opentelemetry = AsyncOpentelemetryClient( + client_wrapper=self._client_wrapper + ) + return self._opentelemetry + + @property + def organizations(self): + if self._organizations is None: + from .resources.organizations.client import AsyncOrganizationsClient # noqa: E402 + + self._organizations = AsyncOrganizationsClient( + client_wrapper=self._client_wrapper + ) + return self._organizations + + @property + def projects(self): + if self._projects is None: + from .resources.projects.client import AsyncProjectsClient # noqa: E402 + + self._projects = AsyncProjectsClient(client_wrapper=self._client_wrapper) + return self._projects + + @property + def prompt_version(self): + if self._prompt_version is None: + from .resources.prompt_version.client import AsyncPromptVersionClient # noqa: E402 + + self._prompt_version = AsyncPromptVersionClient( + client_wrapper=self._client_wrapper + ) + return self._prompt_version + + @property + def prompts(self): + if self._prompts is None: + from .resources.prompts.client import AsyncPromptsClient # noqa: E402 + + self._prompts = AsyncPromptsClient(client_wrapper=self._client_wrapper) + return self._prompts + + @property + def scim(self): + if self._scim is None: + from .resources.scim.client import AsyncScimClient # noqa: E402 + + self._scim = AsyncScimClient(client_wrapper=self._client_wrapper) + return self._scim + + @property + def score_configs(self): + if self._score_configs is None: + from 
.resources.score_configs.client import AsyncScoreConfigsClient # noqa: E402 + + self._score_configs = AsyncScoreConfigsClient( + client_wrapper=self._client_wrapper + ) + return self._score_configs + + @property + def score_v_2(self): + if self._score_v_2 is None: + from .resources.score_v_2.client import AsyncScoreV2Client # noqa: E402 + + self._score_v_2 = AsyncScoreV2Client(client_wrapper=self._client_wrapper) + return self._score_v_2 + + @property + def score(self): + if self._score is None: + from .resources.score.client import AsyncScoreClient # noqa: E402 + + self._score = AsyncScoreClient(client_wrapper=self._client_wrapper) + return self._score + + @property + def sessions(self): + if self._sessions is None: + from .resources.sessions.client import AsyncSessionsClient # noqa: E402 + + self._sessions = AsyncSessionsClient(client_wrapper=self._client_wrapper) + return self._sessions + + @property + def trace(self): + if self._trace is None: + from .resources.trace.client import AsyncTraceClient # noqa: E402 + + self._trace = AsyncTraceClient(client_wrapper=self._client_wrapper) + return self._trace diff --git a/langfuse/api/core/__init__.py b/langfuse/api/core/__init__.py index 58ad52ad2..91742cd87 100644 --- a/langfuse/api/core/__init__.py +++ b/langfuse/api/core/__init__.py @@ -1,30 +1,111 @@ # This file was auto-generated by Fern from our API Definition. 
-from .api_error import ApiError -from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper -from .datetime_utils import serialize_datetime -from .file import File, convert_file_dict_to_httpx_tuples -from .http_client import AsyncHttpClient, HttpClient -from .jsonable_encoder import jsonable_encoder -from .pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .query_encoder import encode_query -from .remove_none_from_dict import remove_none_from_dict -from .request_options import RequestOptions +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .api_error import ApiError + from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper + from .datetime_utils import serialize_datetime + from .file import File, convert_file_dict_to_httpx_tuples, with_content_type + from .http_client import AsyncHttpClient, HttpClient + from .http_response import AsyncHttpResponse, HttpResponse + from .jsonable_encoder import jsonable_encoder + from .pydantic_utilities import ( + IS_PYDANTIC_V2, + UniversalBaseModel, + UniversalRootModel, + parse_obj_as, + universal_field_validator, + universal_root_validator, + update_forward_refs, + ) + from .query_encoder import encode_query + from .remove_none_from_dict import remove_none_from_dict + from .request_options import RequestOptions + from .serialization import FieldMetadata, convert_and_respect_annotation_metadata +_dynamic_imports: typing.Dict[str, str] = { + "ApiError": ".api_error", + "AsyncClientWrapper": ".client_wrapper", + "AsyncHttpClient": ".http_client", + "AsyncHttpResponse": ".http_response", + "BaseClientWrapper": ".client_wrapper", + "FieldMetadata": ".serialization", + "File": ".file", + "HttpClient": ".http_client", + "HttpResponse": ".http_response", + "IS_PYDANTIC_V2": ".pydantic_utilities", + "RequestOptions": ".request_options", + "SyncClientWrapper": ".client_wrapper", + "UniversalBaseModel": 
".pydantic_utilities", + "UniversalRootModel": ".pydantic_utilities", + "convert_and_respect_annotation_metadata": ".serialization", + "convert_file_dict_to_httpx_tuples": ".file", + "encode_query": ".query_encoder", + "jsonable_encoder": ".jsonable_encoder", + "parse_obj_as": ".pydantic_utilities", + "remove_none_from_dict": ".remove_none_from_dict", + "serialize_datetime": ".datetime_utils", + "universal_field_validator": ".pydantic_utilities", + "universal_root_validator": ".pydantic_utilities", + "update_forward_refs": ".pydantic_utilities", + "with_content_type": ".file", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "ApiError", "AsyncClientWrapper", "AsyncHttpClient", + "AsyncHttpResponse", "BaseClientWrapper", + "FieldMetadata", "File", "HttpClient", + "HttpResponse", + "IS_PYDANTIC_V2", "RequestOptions", "SyncClientWrapper", + "UniversalBaseModel", + "UniversalRootModel", + "convert_and_respect_annotation_metadata", "convert_file_dict_to_httpx_tuples", - "deep_union_pydantic_dicts", "encode_query", "jsonable_encoder", - "pydantic_v1", + "parse_obj_as", "remove_none_from_dict", "serialize_datetime", + "universal_field_validator", + "universal_root_validator", + "update_forward_refs", + "with_content_type", ] diff --git a/langfuse/api/core/api_error.py b/langfuse/api/core/api_error.py 
index da734b580..6f850a60c 100644 --- a/langfuse/api/core/api_error.py +++ b/langfuse/api/core/api_error.py @@ -1,17 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import typing +from typing import Any, Dict, Optional class ApiError(Exception): - status_code: typing.Optional[int] - body: typing.Any + headers: Optional[Dict[str, str]] + status_code: Optional[int] + body: Any def __init__( - self, *, status_code: typing.Optional[int] = None, body: typing.Any = None - ): + self, + *, + headers: Optional[Dict[str, str]] = None, + status_code: Optional[int] = None, + body: Any = None, + ) -> None: + self.headers = headers self.status_code = status_code self.body = body def __str__(self) -> str: - return f"status_code: {self.status_code}, body: {self.body}" + return f"headers: {self.headers}, status_code: {self.status_code}, body: {self.body}" diff --git a/langfuse/api/core/client_wrapper.py b/langfuse/api/core/client_wrapper.py index 8a053f4a7..fc33e2937 100644 --- a/langfuse/api/core/client_wrapper.py +++ b/langfuse/api/core/client_wrapper.py @@ -3,7 +3,6 @@ import typing import httpx - from .http_client import AsyncHttpClient, HttpClient @@ -16,6 +15,7 @@ def __init__( x_langfuse_public_key: typing.Optional[str] = None, username: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, password: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, + headers: typing.Optional[typing.Dict[str, str]] = None, base_url: str, timeout: typing.Optional[float] = None, ): @@ -24,11 +24,15 @@ def __init__( self._x_langfuse_public_key = x_langfuse_public_key self._username = username self._password = password + self._headers = headers self._base_url = base_url self._timeout = timeout def get_headers(self) -> typing.Dict[str, str]: - headers: typing.Dict[str, str] = {"X-Fern-Language": "Python"} + headers: typing.Dict[str, str] = { + "X-Fern-Language": "Python", + **(self.get_custom_headers() or {}), + } username = 
self._get_username() password = self._get_password() if username is not None and password is not None: @@ -53,6 +57,9 @@ def _get_password(self) -> typing.Optional[str]: else: return self._password() + def get_custom_headers(self) -> typing.Optional[typing.Dict[str, str]]: + return self._headers + def get_base_url(self) -> str: return self._base_url @@ -69,6 +76,7 @@ def __init__( x_langfuse_public_key: typing.Optional[str] = None, username: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, password: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, + headers: typing.Optional[typing.Dict[str, str]] = None, base_url: str, timeout: typing.Optional[float] = None, httpx_client: httpx.Client, @@ -79,14 +87,15 @@ def __init__( x_langfuse_public_key=x_langfuse_public_key, username=username, password=password, + headers=headers, base_url=base_url, timeout=timeout, ) self.httpx_client = HttpClient( httpx_client=httpx_client, - base_headers=self.get_headers(), - base_timeout=self.get_timeout(), - base_url=self.get_base_url(), + base_headers=self.get_headers, + base_timeout=self.get_timeout, + base_url=self.get_base_url, ) @@ -99,6 +108,7 @@ def __init__( x_langfuse_public_key: typing.Optional[str] = None, username: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, password: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, + headers: typing.Optional[typing.Dict[str, str]] = None, base_url: str, timeout: typing.Optional[float] = None, httpx_client: httpx.AsyncClient, @@ -109,12 +119,13 @@ def __init__( x_langfuse_public_key=x_langfuse_public_key, username=username, password=password, + headers=headers, base_url=base_url, timeout=timeout, ) self.httpx_client = AsyncHttpClient( httpx_client=httpx_client, - base_headers=self.get_headers(), - base_timeout=self.get_timeout(), - base_url=self.get_base_url(), + base_headers=self.get_headers, + base_timeout=self.get_timeout, + base_url=self.get_base_url, ) 
diff --git a/langfuse/api/core/file.py b/langfuse/api/core/file.py index 6e0f92bfc..3467175cb 100644 --- a/langfuse/api/core/file.py +++ b/langfuse/api/core/file.py @@ -1,30 +1,30 @@ # This file was auto-generated by Fern from our API Definition. -import typing +from typing import IO, Dict, List, Mapping, Optional, Tuple, Union, cast # File typing inspired by the flexibility of types within the httpx library # https://github.com/encode/httpx/blob/master/httpx/_types.py -FileContent = typing.Union[typing.IO[bytes], bytes, str] -File = typing.Union[ +FileContent = Union[IO[bytes], bytes, str] +File = Union[ # file (or bytes) FileContent, # (filename, file (or bytes)) - typing.Tuple[typing.Optional[str], FileContent], + Tuple[Optional[str], FileContent], # (filename, file (or bytes), content_type) - typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str]], + Tuple[Optional[str], FileContent, Optional[str]], # (filename, file (or bytes), content_type, headers) - typing.Tuple[ - typing.Optional[str], + Tuple[ + Optional[str], FileContent, - typing.Optional[str], - typing.Mapping[str, str], + Optional[str], + Mapping[str, str], ], ] def convert_file_dict_to_httpx_tuples( - d: typing.Dict[str, typing.Union[File, typing.List[File]]], -) -> typing.List[typing.Tuple[str, File]]: + d: Dict[str, Union[File, List[File]]], +) -> List[Tuple[str, File]]: """ The format we use is a list of tuples, where the first element is the name of the file and the second is the file object. Typically HTTPX wants @@ -41,3 +41,30 @@ def convert_file_dict_to_httpx_tuples( else: httpx_tuples.append((key, file_like)) return httpx_tuples + + +def with_content_type(*, file: File, default_content_type: str) -> File: + """ + This function resolves to the file's content type, if provided, and defaults + to the default_content_type value if not. 
+ """ + if isinstance(file, tuple): + if len(file) == 2: + filename, content = cast(Tuple[Optional[str], FileContent], file) # type: ignore + return (filename, content, default_content_type) + elif len(file) == 3: + filename, content, file_content_type = cast( + Tuple[Optional[str], FileContent, Optional[str]], file + ) # type: ignore + out_content_type = file_content_type or default_content_type + return (filename, content, out_content_type) + elif len(file) == 4: + filename, content, file_content_type, headers = cast( # type: ignore + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], + file, + ) + out_content_type = file_content_type or default_content_type + return (filename, content, out_content_type, headers) + else: + raise ValueError(f"Unexpected tuple length: {len(file)}") + return (None, file, default_content_type) diff --git a/langfuse/api/core/force_multipart.py b/langfuse/api/core/force_multipart.py new file mode 100644 index 000000000..5440913fd --- /dev/null +++ b/langfuse/api/core/force_multipart.py @@ -0,0 +1,18 @@ +# This file was auto-generated by Fern from our API Definition. + +from typing import Any, Dict + + +class ForceMultipartDict(Dict[str, Any]): + """ + A dictionary subclass that always evaluates to True in boolean contexts. + + This is used to force multipart/form-data encoding in HTTP requests even when + the dictionary is empty, which would normally evaluate to False. 
+ """ + + def __bool__(self) -> bool: + return True + + +FORCE_MULTIPART = ForceMultipartDict() diff --git a/langfuse/api/core/http_client.py b/langfuse/api/core/http_client.py index 091f71bc1..8aee8acb8 100644 --- a/langfuse/api/core/http_client.py +++ b/langfuse/api/core/http_client.py @@ -2,7 +2,6 @@ import asyncio import email.utils -import json import re import time import typing @@ -11,12 +10,13 @@ from random import random import httpx - from .file import File, convert_file_dict_to_httpx_tuples +from .force_multipart import FORCE_MULTIPART from .jsonable_encoder import jsonable_encoder from .query_encoder import encode_query from .remove_none_from_dict import remove_none_from_dict from .request_options import RequestOptions +from httpx._types import RequestFiles INITIAL_RETRY_DELAY_SECONDS = 0.5 MAX_RETRY_DELAY_SECONDS = 10 @@ -87,8 +87,8 @@ def _retry_timeout(response: httpx.Response, retries: int) -> float: def _should_retry(response: httpx.Response) -> bool: - retriable_400s = [429, 408, 409] - return response.status_code >= 500 or response.status_code in retriable_400s + retryable_400s = [429, 408, 409] + return response.status_code >= 500 or response.status_code in retryable_400s def remove_omit_from_dict( @@ -147,7 +147,10 @@ def get_request_body( # If both data and json are None, we send json data in the event extra properties are specified json_body = maybe_filter_request_body(json, request_options, omit) - return json_body, data_body + # If you have an empty JSON body, you should just send None + return ( + json_body if json_body != {} else None + ), data_body if data_body != {} else None class HttpClient: @@ -155,9 +158,9 @@ def __init__( self, *, httpx_client: httpx.Client, - base_timeout: typing.Optional[float], - base_headers: typing.Dict[str, str], - base_url: typing.Optional[str] = None, + base_timeout: typing.Callable[[], typing.Optional[float]], + base_headers: typing.Callable[[], typing.Dict[str, str]], + base_url: 
typing.Optional[typing.Callable[[], str]] = None, ): self.base_url = base_url self.base_timeout = base_timeout @@ -165,7 +168,10 @@ def __init__( self.httpx_client = httpx_client def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str: - base_url = self.base_url if maybe_base_url is None else maybe_base_url + base_url = maybe_base_url + if self.base_url is not None and base_url is None: + base_url = self.base_url() + if base_url is None: raise ValueError( "A base_url is required to make this request, please provide one and try again." @@ -185,32 +191,49 @@ def request( typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]] ] = None, files: typing.Optional[ - typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]] + typing.Union[ + typing.Dict[ + str, typing.Optional[typing.Union[File, typing.List[File]]] + ], + typing.List[typing.Tuple[str, File]], + ] ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, - retries: int = 0, + retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> httpx.Response: base_url = self.get_base_url(base_url) timeout = ( request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self.base_timeout + else self.base_timeout() ) json_body, data_body = get_request_body( json=json, data=data, request_options=request_options, omit=omit ) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples( + remove_omit_from_dict(remove_none_from_dict(files), omit) + ) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + response = self.httpx_client.request( method=method, url=urllib.parse.urljoin(f"{base_url}/", path), 
headers=jsonable_encoder( remove_none_from_dict( { - **self.base_headers, + **self.base_headers(), **(headers if headers is not None else {}), **( request_options.get("additional_headers", {}) or {} @@ -243,9 +266,7 @@ def request( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) - if files is not None - else None, + files=request_files, timeout=timeout, ) @@ -285,21 +306,38 @@ def stream( typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]] ] = None, files: typing.Optional[ - typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]] + typing.Union[ + typing.Dict[ + str, typing.Optional[typing.Union[File, typing.List[File]]] + ], + typing.List[typing.Tuple[str, File]], + ] ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, - retries: int = 0, + retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> typing.Iterator[httpx.Response]: base_url = self.get_base_url(base_url) timeout = ( request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self.base_timeout + else self.base_timeout() + ) + + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples( + remove_omit_from_dict(remove_none_from_dict(files), omit) + ) + if (files is not None and files is not omit and isinstance(files, dict)) + else None ) + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body( json=json, data=data, request_options=request_options, omit=omit ) @@ -310,7 +348,7 @@ def stream( headers=jsonable_encoder( remove_none_from_dict( { - **self.base_headers, + **self.base_headers(), **(headers if headers is not None else {}), **( 
request_options.get("additional_headers", {}) @@ -342,9 +380,7 @@ def stream( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) - if files is not None - else None, + files=request_files, timeout=timeout, ) as stream: yield stream @@ -355,9 +391,9 @@ def __init__( self, *, httpx_client: httpx.AsyncClient, - base_timeout: typing.Optional[float], - base_headers: typing.Dict[str, str], - base_url: typing.Optional[str] = None, + base_timeout: typing.Callable[[], typing.Optional[float]], + base_headers: typing.Callable[[], typing.Dict[str, str]], + base_url: typing.Optional[typing.Callable[[], str]] = None, ): self.base_url = base_url self.base_timeout = base_timeout @@ -365,7 +401,10 @@ def __init__( self.httpx_client = httpx_client def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str: - base_url = self.base_url if maybe_base_url is None else maybe_base_url + base_url = maybe_base_url + if self.base_url is not None and base_url is None: + base_url = self.base_url() + if base_url is None: raise ValueError( "A base_url is required to make this request, please provide one and try again." 
@@ -385,21 +424,38 @@ async def request( typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]] ] = None, files: typing.Optional[ - typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]] + typing.Union[ + typing.Dict[ + str, typing.Optional[typing.Union[File, typing.List[File]]] + ], + typing.List[typing.Tuple[str, File]], + ] ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, - retries: int = 0, + retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> httpx.Response: base_url = self.get_base_url(base_url) timeout = ( request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self.base_timeout + else self.base_timeout() + ) + + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples( + remove_omit_from_dict(remove_none_from_dict(files), omit) + ) + if (files is not None and files is not omit and isinstance(files, dict)) + else None ) + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body( json=json, data=data, request_options=request_options, omit=omit ) @@ -411,7 +467,7 @@ async def request( headers=jsonable_encoder( remove_none_from_dict( { - **self.base_headers, + **self.base_headers(), **(headers if headers is not None else {}), **( request_options.get("additional_headers", {}) or {} @@ -444,9 +500,7 @@ async def request( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) - if files is not None - else None, + files=request_files, timeout=timeout, ) @@ -485,21 +539,38 @@ async def stream( typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]] ] = None, files: typing.Optional[ - typing.Dict[str, 
typing.Optional[typing.Union[File, typing.List[File]]]] + typing.Union[ + typing.Dict[ + str, typing.Optional[typing.Union[File, typing.List[File]]] + ], + typing.List[typing.Tuple[str, File]], + ] ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, - retries: int = 0, + retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> typing.AsyncIterator[httpx.Response]: base_url = self.get_base_url(base_url) timeout = ( request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self.base_timeout + else self.base_timeout() + ) + + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples( + remove_omit_from_dict(remove_none_from_dict(files), omit) + ) + if (files is not None and files is not omit and isinstance(files, dict)) + else None ) + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body( json=json, data=data, request_options=request_options, omit=omit ) @@ -510,7 +581,7 @@ async def stream( headers=jsonable_encoder( remove_none_from_dict( { - **self.base_headers, + **self.base_headers(), **(headers if headers is not None else {}), **( request_options.get("additional_headers", {}) @@ -542,9 +613,7 @@ async def stream( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) - if files is not None - else None, + files=request_files, timeout=timeout, ) as stream: yield stream diff --git a/langfuse/api/core/http_response.py b/langfuse/api/core/http_response.py new file mode 100644 index 000000000..2479747e8 --- /dev/null +++ b/langfuse/api/core/http_response.py @@ -0,0 +1,55 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from typing import Dict, Generic, TypeVar + +import httpx + +# Generic to represent the underlying type of the data wrapped by the HTTP response. +T = TypeVar("T") + + +class BaseHttpResponse: + """Minimalist HTTP response wrapper that exposes response headers.""" + + _response: httpx.Response + + def __init__(self, response: httpx.Response): + self._response = response + + @property + def headers(self) -> Dict[str, str]: + return dict(self._response.headers) + + +class HttpResponse(Generic[T], BaseHttpResponse): + """HTTP response wrapper that exposes response headers and data.""" + + _data: T + + def __init__(self, response: httpx.Response, data: T): + super().__init__(response) + self._data = data + + @property + def data(self) -> T: + return self._data + + def close(self) -> None: + self._response.close() + + +class AsyncHttpResponse(Generic[T], BaseHttpResponse): + """HTTP response wrapper that exposes response headers and data.""" + + _data: T + + def __init__(self, response: httpx.Response, data: T): + super().__init__(response) + self._data = data + + @property + def data(self) -> T: + return self._data + + async def close(self) -> None: + await self._response.aclose() diff --git a/langfuse/api/core/http_sse/__init__.py b/langfuse/api/core/http_sse/__init__.py new file mode 100644 index 000000000..ab0b4995a --- /dev/null +++ b/langfuse/api/core/http_sse/__init__.py @@ -0,0 +1,48 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from ._api import EventSource, aconnect_sse, connect_sse + from ._exceptions import SSEError + from ._models import ServerSentEvent +_dynamic_imports: typing.Dict[str, str] = { + "EventSource": "._api", + "SSEError": "._exceptions", + "ServerSentEvent": "._models", + "aconnect_sse": "._api", + "connect_sse": "._api", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + + +__all__ = ["EventSource", "SSEError", "ServerSentEvent", "aconnect_sse", "connect_sse"] diff --git a/langfuse/api/core/http_sse/_api.py b/langfuse/api/core/http_sse/_api.py new file mode 100644 index 000000000..eb739a22b --- /dev/null +++ b/langfuse/api/core/http_sse/_api.py @@ -0,0 +1,114 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import re +from contextlib import asynccontextmanager, contextmanager +from typing import Any, AsyncGenerator, AsyncIterator, Iterator, cast + +import httpx +from ._decoders import SSEDecoder +from ._exceptions import SSEError +from ._models import ServerSentEvent + + +class EventSource: + def __init__(self, response: httpx.Response) -> None: + self._response = response + + def _check_content_type(self) -> None: + content_type = self._response.headers.get("content-type", "").partition(";")[0] + if "text/event-stream" not in content_type: + raise SSEError( + f"Expected response header Content-Type to contain 'text/event-stream', got {content_type!r}" + ) + + def _get_charset(self) -> str: + """Extract charset from Content-Type header, fallback to UTF-8.""" + content_type = self._response.headers.get("content-type", "") + + # Parse charset parameter using regex + charset_match = re.search(r"charset=([^;\s]+)", content_type, re.IGNORECASE) + if charset_match: + charset = charset_match.group(1).strip("\"'") + # Validate that it's a known encoding + try: + # Test if the charset is valid by trying to encode/decode + "test".encode(charset).decode(charset) + return charset + except (LookupError, UnicodeError): + # If charset is invalid, fall back to UTF-8 + pass + + # Default to UTF-8 if no charset specified or invalid charset + return "utf-8" + + @property + def response(self) -> httpx.Response: + return self._response + + def iter_sse(self) -> Iterator[ServerSentEvent]: + self._check_content_type() + decoder = SSEDecoder() + charset = self._get_charset() + + buffer = "" + for chunk in self._response.iter_bytes(): + # Decode chunk using detected charset + text_chunk = chunk.decode(charset, errors="replace") + buffer += text_chunk + + # Process complete lines + while "\n" in buffer: + line, buffer = buffer.split("\n", 1) + line = line.rstrip("\r") + sse = decoder.decode(line) + # when we reach a "\n\n" => line = '' + # => decoder will attempt to return an SSE Event + if 
sse is not None: + yield sse + + # Process any remaining data in buffer + if buffer.strip(): + line = buffer.rstrip("\r") + sse = decoder.decode(line) + if sse is not None: + yield sse + + async def aiter_sse(self) -> AsyncGenerator[ServerSentEvent, None]: + self._check_content_type() + decoder = SSEDecoder() + lines = cast(AsyncGenerator[str, None], self._response.aiter_lines()) + try: + async for line in lines: + line = line.rstrip("\n") + sse = decoder.decode(line) + if sse is not None: + yield sse + finally: + await lines.aclose() + + +@contextmanager +def connect_sse( + client: httpx.Client, method: str, url: str, **kwargs: Any +) -> Iterator[EventSource]: + headers = kwargs.pop("headers", {}) + headers["Accept"] = "text/event-stream" + headers["Cache-Control"] = "no-store" + + with client.stream(method, url, headers=headers, **kwargs) as response: + yield EventSource(response) + + +@asynccontextmanager +async def aconnect_sse( + client: httpx.AsyncClient, + method: str, + url: str, + **kwargs: Any, +) -> AsyncIterator[EventSource]: + headers = kwargs.pop("headers", {}) + headers["Accept"] = "text/event-stream" + headers["Cache-Control"] = "no-store" + + async with client.stream(method, url, headers=headers, **kwargs) as response: + yield EventSource(response) diff --git a/langfuse/api/core/http_sse/_decoders.py b/langfuse/api/core/http_sse/_decoders.py new file mode 100644 index 000000000..bdec57b44 --- /dev/null +++ b/langfuse/api/core/http_sse/_decoders.py @@ -0,0 +1,66 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from typing import List, Optional + +from ._models import ServerSentEvent + + +class SSEDecoder: + def __init__(self) -> None: + self._event = "" + self._data: List[str] = [] + self._last_event_id = "" + self._retry: Optional[int] = None + + def decode(self, line: str) -> Optional[ServerSentEvent]: + # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501 + + if not line: + if ( + not self._event + and not self._data + and not self._last_event_id + and self._retry is None + ): + return None + + sse = ServerSentEvent( + event=self._event, + data="\n".join(self._data), + id=self._last_event_id, + retry=self._retry, + ) + + # NOTE: as per the SSE spec, do not reset last_event_id. + self._event = "" + self._data = [] + self._retry = None + + return sse + + if line.startswith(":"): + return None + + fieldname, _, value = line.partition(":") + + if value.startswith(" "): + value = value[1:] + + if fieldname == "event": + self._event = value + elif fieldname == "data": + self._data.append(value) + elif fieldname == "id": + if "\0" in value: + pass + else: + self._last_event_id = value + elif fieldname == "retry": + try: + self._retry = int(value) + except (TypeError, ValueError): + pass + else: + pass # Field is ignored. + + return None diff --git a/langfuse/api/core/http_sse/_exceptions.py b/langfuse/api/core/http_sse/_exceptions.py new file mode 100644 index 000000000..81605a8a6 --- /dev/null +++ b/langfuse/api/core/http_sse/_exceptions.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import httpx + + +class SSEError(httpx.TransportError): + pass diff --git a/langfuse/api/core/http_sse/_models.py b/langfuse/api/core/http_sse/_models.py new file mode 100644 index 000000000..1af57f8fd --- /dev/null +++ b/langfuse/api/core/http_sse/_models.py @@ -0,0 +1,17 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import json +from dataclasses import dataclass +from typing import Any, Optional + + +@dataclass(frozen=True) +class ServerSentEvent: + event: str = "message" + data: str = "" + id: str = "" + retry: Optional[int] = None + + def json(self) -> Any: + """Parse the data field as JSON.""" + return json.loads(self.data) diff --git a/langfuse/api/core/jsonable_encoder.py b/langfuse/api/core/jsonable_encoder.py index 7a05e9190..90f53dfa7 100644 --- a/langfuse/api/core/jsonable_encoder.py +++ b/langfuse/api/core/jsonable_encoder.py @@ -11,35 +11,23 @@ import base64 import dataclasses import datetime as dt -from collections import defaultdict from enum import Enum from pathlib import PurePath from types import GeneratorType -from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Set, Union +import pydantic from .datetime_utils import serialize_datetime -from .pydantic_utilities import pydantic_v1 +from .pydantic_utilities import ( + IS_PYDANTIC_V2, + encode_by_type, + to_jsonable_with_fallback, +) SetIntStr = Set[Union[int, str]] DictIntStrAny = Dict[Union[int, str], Any] -def generate_encoders_by_class_tuples( - type_encoder_map: Dict[Any, Callable[[Any], Any]], -) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]: - encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict( - tuple - ) - for type_, encoder in type_encoder_map.items(): - encoders_by_class_tuples[encoder] += (type_,) - return encoders_by_class_tuples - - -encoders_by_class_tuples = generate_encoders_by_class_tuples( - pydantic_v1.json.ENCODERS_BY_TYPE -) - - def jsonable_encoder( obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None ) -> Any: @@ -51,16 +39,21 @@ def jsonable_encoder( for encoder_type, encoder_instance in custom_encoder.items(): if isinstance(obj, encoder_type): return encoder_instance(obj) - if isinstance(obj, pydantic_v1.BaseModel): - encoder = getattr(obj.__config__, 
"json_encoders", {}) + if isinstance(obj, pydantic.BaseModel): + if IS_PYDANTIC_V2: + encoder = getattr(obj.model_config, "json_encoders", {}) # type: ignore # Pydantic v2 + else: + encoder = getattr(obj.__config__, "json_encoders", {}) # type: ignore # Pydantic v1 if custom_encoder: encoder.update(custom_encoder) obj_dict = obj.dict(by_alias=True) if "__root__" in obj_dict: obj_dict = obj_dict["__root__"] + if "root" in obj_dict: + obj_dict = obj_dict["root"] return jsonable_encoder(obj_dict, custom_encoder=encoder) if dataclasses.is_dataclass(obj): - obj_dict = dataclasses.asdict(obj) + obj_dict = dataclasses.asdict(obj) # type: ignore return jsonable_encoder(obj_dict, custom_encoder=custom_encoder) if isinstance(obj, bytes): return base64.b64encode(obj).decode("utf-8") @@ -89,20 +82,21 @@ def jsonable_encoder( encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder)) return encoded_list - if type(obj) in pydantic_v1.json.ENCODERS_BY_TYPE: - return pydantic_v1.json.ENCODERS_BY_TYPE[type(obj)](obj) - for encoder, classes_tuple in encoders_by_class_tuples.items(): - if isinstance(obj, classes_tuple): - return encoder(obj) + def fallback_serializer(o: Any) -> Any: + attempt_encode = encode_by_type(o) + if attempt_encode is not None: + return attempt_encode - try: - data = dict(obj) - except Exception as e: - errors: List[Exception] = [] - errors.append(e) try: - data = vars(obj) + data = dict(o) except Exception as e: + errors: List[Exception] = [] errors.append(e) - raise ValueError(errors) from e - return jsonable_encoder(data, custom_encoder=custom_encoder) + try: + data = vars(o) + except Exception as e: + errors.append(e) + raise ValueError(errors) from e + return jsonable_encoder(data, custom_encoder=custom_encoder) + + return to_jsonable_with_fallback(obj, fallback_serializer) diff --git a/langfuse/api/core/pydantic_utilities.py b/langfuse/api/core/pydantic_utilities.py index a72c1a52f..d2b7b51b6 100644 --- 
a/langfuse/api/core/pydantic_utilities.py +++ b/langfuse/api/core/pydantic_utilities.py @@ -1,28 +1,310 @@ # This file was auto-generated by Fern from our API Definition. -import typing +# nopycln: file +import datetime as dt +from collections import defaultdict +from typing import ( + Any, + Callable, + ClassVar, + Dict, + List, + Mapping, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) import pydantic IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") if IS_PYDANTIC_V2: - import pydantic.v1 as pydantic_v1 # type: ignore # nopycln: import + from pydantic.v1.datetime_parse import parse_date as parse_date + from pydantic.v1.datetime_parse import parse_datetime as parse_datetime + from pydantic.v1.fields import ModelField as ModelField + from pydantic.v1.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[attr-defined] + from pydantic.v1.typing import get_args as get_args + from pydantic.v1.typing import get_origin as get_origin + from pydantic.v1.typing import is_literal_type as is_literal_type + from pydantic.v1.typing import is_union as is_union else: - import pydantic as pydantic_v1 # type: ignore # nopycln: import + from pydantic.datetime_parse import parse_date as parse_date # type: ignore[no-redef] + from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore[no-redef] + from pydantic.fields import ModelField as ModelField # type: ignore[attr-defined, no-redef] + from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[no-redef] + from pydantic.typing import get_args as get_args # type: ignore[no-redef] + from pydantic.typing import get_origin as get_origin # type: ignore[no-redef] + from pydantic.typing import is_literal_type as is_literal_type # type: ignore[no-redef] + from pydantic.typing import is_union as is_union # type: ignore[no-redef] + +from .datetime_utils import serialize_datetime +from .serialization import convert_and_respect_annotation_metadata +from typing_extensions 
import TypeAlias + +T = TypeVar("T") +Model = TypeVar("Model", bound=pydantic.BaseModel) + + +def parse_obj_as(type_: Type[T], object_: Any) -> T: + dealiased_object = convert_and_respect_annotation_metadata( + object_=object_, annotation=type_, direction="read" + ) + if IS_PYDANTIC_V2: + adapter = pydantic.TypeAdapter(type_) # type: ignore[attr-defined] + return adapter.validate_python(dealiased_object) + return pydantic.parse_obj_as(type_, dealiased_object) + + +def to_jsonable_with_fallback( + obj: Any, fallback_serializer: Callable[[Any], Any] +) -> Any: + if IS_PYDANTIC_V2: + from pydantic_core import to_jsonable_python + + return to_jsonable_python(obj, fallback=fallback_serializer) + return fallback_serializer(obj) + + +class UniversalBaseModel(pydantic.BaseModel): + if IS_PYDANTIC_V2: + model_config: ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( # type: ignore[typeddict-unknown-key] + # Allow fields beginning with `model_` to be used in the model + protected_namespaces=(), + ) + + @pydantic.model_serializer(mode="plain", when_used="json") # type: ignore[attr-defined] + def serialize_model(self) -> Any: # type: ignore[name-defined] + serialized = self.dict() # type: ignore[attr-defined] + data = { + k: serialize_datetime(v) if isinstance(v, dt.datetime) else v + for k, v in serialized.items() + } + return data + + else: + + class Config: + smart_union = True + json_encoders = {dt.datetime: serialize_datetime} + + @classmethod + def model_construct( + cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any + ) -> "Model": + dealiased_object = convert_and_respect_annotation_metadata( + object_=values, annotation=cls, direction="read" + ) + return cls.construct(_fields_set, **dealiased_object) + + @classmethod + def construct( + cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any + ) -> "Model": + dealiased_object = convert_and_respect_annotation_metadata( + object_=values, annotation=cls, direction="read" + ) + if 
IS_PYDANTIC_V2: + return super().model_construct(_fields_set, **dealiased_object) # type: ignore[misc] + return super().construct(_fields_set, **dealiased_object) + + def json(self, **kwargs: Any) -> str: + kwargs_with_defaults = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + if IS_PYDANTIC_V2: + return super().model_dump_json(**kwargs_with_defaults) # type: ignore[misc] + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: Any) -> Dict[str, Any]: + """ + Override the default dict method to `exclude_unset` by default. This function patches + `exclude_unset` to work include fields within non-None default values. + """ + # Note: the logic here is multiplexed given the levers exposed in Pydantic V1 vs V2 + # Pydantic V1's .dict can be extremely slow, so we do not want to call it twice. + # + # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models + # that we have less control over, and this is less intrusive than custom serializers for now. + if IS_PYDANTIC_V2: + kwargs_with_defaults_exclude_unset = { + **kwargs, + "by_alias": True, + "exclude_unset": True, + "exclude_none": False, + } + kwargs_with_defaults_exclude_none = { + **kwargs, + "by_alias": True, + "exclude_none": True, + "exclude_unset": False, + } + dict_dump = deep_union_pydantic_dicts( + super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore[misc] + super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore[misc] + ) + + else: + _fields_set = self.__fields_set__.copy() + + fields = _get_model_fields(self.__class__) + for name, field in fields.items(): + if name not in _fields_set: + default = _get_field_default(field) + + # If the default values are non-null act like they've been set + # This effectively allows exclude_unset to work like exclude_none where + # the latter passes through intentionally set none values. 
+ if default is not None or ( + "exclude_unset" in kwargs and not kwargs["exclude_unset"] + ): + _fields_set.add(name) + + if default is not None: + self.__fields_set__.add(name) + + kwargs_with_defaults_exclude_unset_include_fields = { + "by_alias": True, + "exclude_unset": True, + "include": _fields_set, + **kwargs, + } + + dict_dump = super().dict( + **kwargs_with_defaults_exclude_unset_include_fields + ) + + return cast( + Dict[str, Any], + convert_and_respect_annotation_metadata( + object_=dict_dump, annotation=self.__class__, direction="write" + ), + ) + + +def _union_list_of_pydantic_dicts( + source: List[Any], destination: List[Any] +) -> List[Any]: + converted_list: List[Any] = [] + for i, item in enumerate(source): + destination_value = destination[i] + if isinstance(item, dict): + converted_list.append(deep_union_pydantic_dicts(item, destination_value)) + elif isinstance(item, list): + converted_list.append( + _union_list_of_pydantic_dicts(item, destination_value) + ) + else: + converted_list.append(item) + return converted_list def deep_union_pydantic_dicts( - source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any] -) -> typing.Dict[str, typing.Any]: + source: Dict[str, Any], destination: Dict[str, Any] +) -> Dict[str, Any]: for key, value in source.items(): + node = destination.setdefault(key, {}) if isinstance(value, dict): - node = destination.setdefault(key, {}) deep_union_pydantic_dicts(value, node) + # Note: we do not do this same processing for sets given we do not have sets of models + # and given the sets are unordered, the processing of the set and matching objects would + # be non-trivial. 
+ elif isinstance(value, list): + destination[key] = _union_list_of_pydantic_dicts(value, node) else: destination[key] = value return destination -__all__ = ["pydantic_v1"] +if IS_PYDANTIC_V2: + + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore[misc, name-defined, type-arg] + pass + + UniversalRootModel: TypeAlias = V2RootModel # type: ignore[misc] +else: + UniversalRootModel: TypeAlias = UniversalBaseModel # type: ignore[misc, no-redef] + + +def encode_by_type(o: Any) -> Any: + encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict( + tuple + ) + for type_, encoder in encoders_by_type.items(): + encoders_by_class_tuples[encoder] += (type_,) + + if type(o) in encoders_by_type: + return encoders_by_type[type(o)](o) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(o, classes_tuple): + return encoder(o) + + +def update_forward_refs(model: Type["Model"], **localns: Any) -> None: + if IS_PYDANTIC_V2: + model.model_rebuild(raise_errors=False) # type: ignore[attr-defined] + else: + model.update_forward_refs(**localns) + + +# Mirrors Pydantic's internal typing +AnyCallable = Callable[..., Any] + + +def universal_root_validator( + pre: bool = False, +) -> Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + if IS_PYDANTIC_V2: + # In Pydantic v2, for RootModel we always use "before" mode + # The custom validators transform the input value before the model is created + return cast(AnyCallable, pydantic.model_validator(mode="before")(func)) # type: ignore[attr-defined] + return cast(AnyCallable, pydantic.root_validator(pre=pre)(func)) # type: ignore[call-overload] + + return decorator + + +def universal_field_validator( + field_name: str, pre: bool = False +) -> Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + if IS_PYDANTIC_V2: + return cast( + AnyCallable, + pydantic.field_validator(field_name, mode="before" 
if pre else "after")( + func + ), + ) # type: ignore[attr-defined] + return cast(AnyCallable, pydantic.validator(field_name, pre=pre)(func)) + + return decorator + + +PydanticField = Union[ModelField, pydantic.fields.FieldInfo] + + +def _get_model_fields(model: Type["Model"]) -> Mapping[str, PydanticField]: + if IS_PYDANTIC_V2: + return cast(Mapping[str, PydanticField], model.model_fields) # type: ignore[attr-defined] + return cast(Mapping[str, PydanticField], model.__fields__) + + +def _get_field_default(field: PydanticField) -> Any: + try: + value = field.get_default() # type: ignore[union-attr] + except: + value = field.default + if IS_PYDANTIC_V2: + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None + return value + return value diff --git a/langfuse/api/core/query_encoder.py b/langfuse/api/core/query_encoder.py index 069633086..03fbf59bd 100644 --- a/langfuse/api/core/query_encoder.py +++ b/langfuse/api/core/query_encoder.py @@ -1,39 +1,60 @@ # This file was auto-generated by Fern from our API Definition. 
-from collections import ChainMap -from typing import Any, Dict, Optional +from typing import Any, Dict, List, Optional, Tuple -from .pydantic_utilities import pydantic_v1 +import pydantic # Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict def traverse_query_dict( dict_flat: Dict[str, Any], key_prefix: Optional[str] = None -) -> Dict[str, Any]: - result = {} +) -> List[Tuple[str, Any]]: + result = [] for k, v in dict_flat.items(): key = f"{key_prefix}[{k}]" if key_prefix is not None else k if isinstance(v, dict): - result.update(traverse_query_dict(v, key)) + result.extend(traverse_query_dict(v, key)) + elif isinstance(v, list): + for arr_v in v: + if isinstance(arr_v, dict): + result.extend(traverse_query_dict(arr_v, key)) + else: + result.append((key, arr_v)) else: - result[key] = v + result.append((key, v)) return result -def single_query_encoder(query_key: str, query_value: Any) -> Dict[str, Any]: - if isinstance(query_value, pydantic_v1.BaseModel) or isinstance(query_value, dict): - if isinstance(query_value, pydantic_v1.BaseModel): +def single_query_encoder(query_key: str, query_value: Any) -> List[Tuple[str, Any]]: + if isinstance(query_value, pydantic.BaseModel) or isinstance(query_value, dict): + if isinstance(query_value, pydantic.BaseModel): obj_dict = query_value.dict(by_alias=True) else: obj_dict = query_value return traverse_query_dict(obj_dict, query_key) + elif isinstance(query_value, list): + encoded_values: List[Tuple[str, Any]] = [] + for value in query_value: + if isinstance(value, pydantic.BaseModel) or isinstance(value, dict): + if isinstance(value, pydantic.BaseModel): + obj_dict = value.dict(by_alias=True) + elif isinstance(value, dict): + obj_dict = value - return {query_key: query_value} + encoded_values.extend(single_query_encoder(query_key, obj_dict)) + else: + encoded_values.append((query_key, value)) + return encoded_values -def encode_query(query: Optional[Dict[str, Any]]) -> 
Optional[Dict[str, Any]]: - return ( - dict(ChainMap(*[single_query_encoder(k, v) for k, v in query.items()])) - if query is not None - else None - ) + return [(query_key, query_value)] + + +def encode_query(query: Optional[Dict[str, Any]]) -> Optional[List[Tuple[str, Any]]]: + if query is None: + return None + + encoded_query = [] + for k, v in query.items(): + encoded_query.extend(single_query_encoder(k, v)) + return encoded_query diff --git a/langfuse/api/core/request_options.py b/langfuse/api/core/request_options.py index d0bf0dbce..1b3880443 100644 --- a/langfuse/api/core/request_options.py +++ b/langfuse/api/core/request_options.py @@ -23,6 +23,8 @@ class RequestOptions(typing.TypedDict, total=False): - additional_query_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's query parameters dict - additional_body_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's body parameters dict + + - chunk_size: int. The size, in bytes, to process each chunk of data being streamed back within the response. This equates to leveraging `chunk_size` within `requests` or `httpx`, and is only leveraged for file downloads. """ timeout_in_seconds: NotRequired[int] @@ -30,3 +32,4 @@ class RequestOptions(typing.TypedDict, total=False): additional_headers: NotRequired[typing.Dict[str, typing.Any]] additional_query_parameters: NotRequired[typing.Dict[str, typing.Any]] additional_body_parameters: NotRequired[typing.Dict[str, typing.Any]] + chunk_size: NotRequired[int] diff --git a/langfuse/api/core/serialization.py b/langfuse/api/core/serialization.py new file mode 100644 index 000000000..ad6eb8d7f --- /dev/null +++ b/langfuse/api/core/serialization.py @@ -0,0 +1,282 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import collections +import inspect +import typing + +import pydantic +import typing_extensions + + +class FieldMetadata: + """ + Metadata class used to annotate fields to provide additional information. + + Example: + class MyDict(TypedDict): + field: typing.Annotated[str, FieldMetadata(alias="field_name")] + + Will serialize: `{"field": "value"}` + To: `{"field_name": "value"}` + """ + + alias: str + + def __init__(self, *, alias: str) -> None: + self.alias = alias + + +def convert_and_respect_annotation_metadata( + *, + object_: typing.Any, + annotation: typing.Any, + inner_type: typing.Optional[typing.Any] = None, + direction: typing.Literal["read", "write"], +) -> typing.Any: + """ + Respect the metadata annotations on a field, such as aliasing. This function effectively + manipulates the dict-form of an object to respect the metadata annotations. This is primarily used for + TypedDicts, which cannot support aliasing out of the box, and can be extended for additional + utilities, such as defaults. 
+ + Parameters + ---------- + object_ : typing.Any + + annotation : type + The type we're looking to apply typing annotations from + + inner_type : typing.Optional[type] + + Returns + ------- + typing.Any + """ + + if object_ is None: + return None + if inner_type is None: + inner_type = annotation + + clean_type = _remove_annotations(inner_type) + # Pydantic models + if ( + inspect.isclass(clean_type) + and issubclass(clean_type, pydantic.BaseModel) + and isinstance(object_, typing.Mapping) + ): + return _convert_mapping(object_, clean_type, direction) + # TypedDicts + if typing_extensions.is_typeddict(clean_type) and isinstance( + object_, typing.Mapping + ): + return _convert_mapping(object_, clean_type, direction) + + if ( + typing_extensions.get_origin(clean_type) == typing.Dict + or typing_extensions.get_origin(clean_type) == dict + or clean_type == typing.Dict + ) and isinstance(object_, typing.Dict): + key_type = typing_extensions.get_args(clean_type)[0] + value_type = typing_extensions.get_args(clean_type)[1] + + return { + key: convert_and_respect_annotation_metadata( + object_=value, + annotation=annotation, + inner_type=value_type, + direction=direction, + ) + for key, value in object_.items() + } + + # If you're iterating on a string, do not bother to coerce it to a sequence. 
+ if not isinstance(object_, str): + if ( + typing_extensions.get_origin(clean_type) == typing.Set + or typing_extensions.get_origin(clean_type) == set + or clean_type == typing.Set + ) and isinstance(object_, typing.Set): + inner_type = typing_extensions.get_args(clean_type)[0] + return { + convert_and_respect_annotation_metadata( + object_=item, + annotation=annotation, + inner_type=inner_type, + direction=direction, + ) + for item in object_ + } + elif ( + ( + typing_extensions.get_origin(clean_type) == typing.List + or typing_extensions.get_origin(clean_type) == list + or clean_type == typing.List + ) + and isinstance(object_, typing.List) + ) or ( + ( + typing_extensions.get_origin(clean_type) == typing.Sequence + or typing_extensions.get_origin(clean_type) == collections.abc.Sequence + or clean_type == typing.Sequence + ) + and isinstance(object_, typing.Sequence) + ): + inner_type = typing_extensions.get_args(clean_type)[0] + return [ + convert_and_respect_annotation_metadata( + object_=item, + annotation=annotation, + inner_type=inner_type, + direction=direction, + ) + for item in object_ + ] + + if typing_extensions.get_origin(clean_type) == typing.Union: + # We should be able to ~relatively~ safely try to convert keys against all + # member types in the union, the edge case here is if one member aliases a field + # of the same name to a different name from another member + # Or if another member aliases a field of the same name that another member does not. + for member in typing_extensions.get_args(clean_type): + object_ = convert_and_respect_annotation_metadata( + object_=object_, + annotation=annotation, + inner_type=member, + direction=direction, + ) + return object_ + + annotated_type = _get_annotation(annotation) + if annotated_type is None: + return object_ + + # If the object is not a TypedDict, a Union, or other container (list, set, sequence, etc.) + # Then we can safely call it on the recursive conversion. 
+ return object_ + + +def _convert_mapping( + object_: typing.Mapping[str, object], + expected_type: typing.Any, + direction: typing.Literal["read", "write"], +) -> typing.Mapping[str, object]: + converted_object: typing.Dict[str, object] = {} + try: + annotations = typing_extensions.get_type_hints( + expected_type, include_extras=True + ) + except NameError: + # The TypedDict contains a circular reference, so + # we use the __annotations__ attribute directly. + annotations = getattr(expected_type, "__annotations__", {}) + aliases_to_field_names = _get_alias_to_field_name(annotations) + for key, value in object_.items(): + if direction == "read" and key in aliases_to_field_names: + dealiased_key = aliases_to_field_names.get(key) + if dealiased_key is not None: + type_ = annotations.get(dealiased_key) + else: + type_ = annotations.get(key) + # Note you can't get the annotation by the field name if you're in read mode, so you must check the aliases map + # + # So this is effectively saying if we're in write mode, and we don't have a type, or if we're in read mode and we don't have an alias + # then we can just pass the value through as is + if type_ is None: + converted_object[key] = value + elif direction == "read" and key not in aliases_to_field_names: + converted_object[key] = convert_and_respect_annotation_metadata( + object_=value, annotation=type_, direction=direction + ) + else: + converted_object[ + _alias_key(key, type_, direction, aliases_to_field_names) + ] = convert_and_respect_annotation_metadata( + object_=value, annotation=type_, direction=direction + ) + return converted_object + + +def _get_annotation(type_: typing.Any) -> typing.Optional[typing.Any]: + maybe_annotated_type = typing_extensions.get_origin(type_) + if maybe_annotated_type is None: + return None + + if maybe_annotated_type == typing_extensions.NotRequired: + type_ = typing_extensions.get_args(type_)[0] + maybe_annotated_type = typing_extensions.get_origin(type_) + + if 
maybe_annotated_type == typing_extensions.Annotated: + return type_ + + return None + + +def _remove_annotations(type_: typing.Any) -> typing.Any: + maybe_annotated_type = typing_extensions.get_origin(type_) + if maybe_annotated_type is None: + return type_ + + if maybe_annotated_type == typing_extensions.NotRequired: + return _remove_annotations(typing_extensions.get_args(type_)[0]) + + if maybe_annotated_type == typing_extensions.Annotated: + return _remove_annotations(typing_extensions.get_args(type_)[0]) + + return type_ + + +def get_alias_to_field_mapping(type_: typing.Any) -> typing.Dict[str, str]: + annotations = typing_extensions.get_type_hints(type_, include_extras=True) + return _get_alias_to_field_name(annotations) + + +def get_field_to_alias_mapping(type_: typing.Any) -> typing.Dict[str, str]: + annotations = typing_extensions.get_type_hints(type_, include_extras=True) + return _get_field_to_alias_name(annotations) + + +def _get_alias_to_field_name( + field_to_hint: typing.Dict[str, typing.Any], +) -> typing.Dict[str, str]: + aliases = {} + for field, hint in field_to_hint.items(): + maybe_alias = _get_alias_from_type(hint) + if maybe_alias is not None: + aliases[maybe_alias] = field + return aliases + + +def _get_field_to_alias_name( + field_to_hint: typing.Dict[str, typing.Any], +) -> typing.Dict[str, str]: + aliases = {} + for field, hint in field_to_hint.items(): + maybe_alias = _get_alias_from_type(hint) + if maybe_alias is not None: + aliases[field] = maybe_alias + return aliases + + +def _get_alias_from_type(type_: typing.Any) -> typing.Optional[str]: + maybe_annotated_type = _get_annotation(type_) + + if maybe_annotated_type is not None: + # The actual annotations are 1 onward, the first is the annotated type + annotations = typing_extensions.get_args(maybe_annotated_type)[1:] + + for annotation in annotations: + if isinstance(annotation, FieldMetadata) and annotation.alias is not None: + return annotation.alias + return None + + +def 
_alias_key( + key: str, + type_: typing.Any, + direction: typing.Literal["read", "write"], + aliases_to_field_names: typing.Dict[str, str], +) -> str: + if direction == "read": + return aliases_to_field_names.get(key, key) + return _get_alias_from_type(type_=type_) or key diff --git a/langfuse/api/reference.md b/langfuse/api/reference.md deleted file mode 100644 index 79be952da..000000000 --- a/langfuse/api/reference.md +++ /dev/null @@ -1,7444 +0,0 @@ -# Reference -## AnnotationQueues -
client.annotation_queues.list_queues(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get all annotation queues -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.list_queues() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**page:** `typing.Optional[int]` — page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — limit of items per page - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.annotation_queues.create_queue(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create an annotation queue -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import CreateAnnotationQueueRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.create_queue( - request=CreateAnnotationQueueRequest( - name="name", - score_config_ids=["scoreConfigIds", "scoreConfigIds"], - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `CreateAnnotationQueueRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.annotation_queues.get_queue(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get an annotation queue by ID -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.get_queue( - queue_id="queueId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**queue_id:** `str` — The unique identifier of the annotation queue - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.annotation_queues.list_queue_items(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get items for a specific annotation queue -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.list_queue_items( - queue_id="queueId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**queue_id:** `str` — The unique identifier of the annotation queue - -
-
- -
-
- -**status:** `typing.Optional[AnnotationQueueStatus]` — Filter by status - -
-
- -
-
- -**page:** `typing.Optional[int]` — page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — limit of items per page - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.annotation_queues.get_queue_item(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a specific item from an annotation queue -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.get_queue_item( - queue_id="queueId", - item_id="itemId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**queue_id:** `str` — The unique identifier of the annotation queue - -
-
- -
-
- -**item_id:** `str` — The unique identifier of the annotation queue item - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.annotation_queues.create_queue_item(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Add an item to an annotation queue -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import AnnotationQueueObjectType, CreateAnnotationQueueItemRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.create_queue_item( - queue_id="queueId", - request=CreateAnnotationQueueItemRequest( - object_id="objectId", - object_type=AnnotationQueueObjectType.TRACE, - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**queue_id:** `str` — The unique identifier of the annotation queue - -
-
- -
-
- -**request:** `CreateAnnotationQueueItemRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.annotation_queues.update_queue_item(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Update an annotation queue item -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import UpdateAnnotationQueueItemRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.update_queue_item( - queue_id="queueId", - item_id="itemId", - request=UpdateAnnotationQueueItemRequest(), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**queue_id:** `str` — The unique identifier of the annotation queue - -
-
- -
-
- -**item_id:** `str` — The unique identifier of the annotation queue item - -
-
- -
-
- -**request:** `UpdateAnnotationQueueItemRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.annotation_queues.delete_queue_item(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Remove an item from an annotation queue -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.delete_queue_item( - queue_id="queueId", - item_id="itemId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**queue_id:** `str` — The unique identifier of the annotation queue - -
-
- -
-
- -**item_id:** `str` — The unique identifier of the annotation queue item - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.annotation_queues.create_queue_assignment(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create an assignment for a user to an annotation queue -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import AnnotationQueueAssignmentRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.create_queue_assignment( - queue_id="queueId", - request=AnnotationQueueAssignmentRequest( - user_id="userId", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**queue_id:** `str` — The unique identifier of the annotation queue - -
-
- -
-
- -**request:** `AnnotationQueueAssignmentRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.annotation_queues.delete_queue_assignment(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete an assignment for a user to an annotation queue -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import AnnotationQueueAssignmentRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.annotation_queues.delete_queue_assignment( - queue_id="queueId", - request=AnnotationQueueAssignmentRequest( - user_id="userId", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**queue_id:** `str` — The unique identifier of the annotation queue - -
-
- -
-
- -**request:** `AnnotationQueueAssignmentRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## BlobStorageIntegrations -
client.blob_storage_integrations.get_blob_storage_integrations() -
-
- -#### 📝 Description - -
-
- -
-
- -Get all blob storage integrations for the organization (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.blob_storage_integrations.get_blob_storage_integrations() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.blob_storage_integrations.upsert_blob_storage_integration(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create or update a blob storage integration for a specific project (requires organization-scoped API key). The configuration is validated by performing a test upload to the bucket. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import ( - BlobStorageExportFrequency, - BlobStorageExportMode, - BlobStorageIntegrationFileType, - BlobStorageIntegrationType, - CreateBlobStorageIntegrationRequest, -) -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.blob_storage_integrations.upsert_blob_storage_integration( - request=CreateBlobStorageIntegrationRequest( - project_id="projectId", - type=BlobStorageIntegrationType.S_3, - bucket_name="bucketName", - region="region", - export_frequency=BlobStorageExportFrequency.HOURLY, - enabled=True, - force_path_style=True, - file_type=BlobStorageIntegrationFileType.JSON, - export_mode=BlobStorageExportMode.FULL_HISTORY, - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `CreateBlobStorageIntegrationRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.blob_storage_integrations.delete_blob_storage_integration(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete a blob storage integration by ID (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.blob_storage_integrations.delete_blob_storage_integration( - id="id", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Comments -
client.comments.create(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a comment. Comments may be attached to different object types (trace, observation, session, prompt). -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import CreateCommentRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.comments.create( - request=CreateCommentRequest( - project_id="projectId", - object_type="objectType", - object_id="objectId", - content="content", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `CreateCommentRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.comments.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get all comments -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.comments.get() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**page:** `typing.Optional[int]` — Page number, starts at 1. - -
-
- -
-
- -**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit - -
-
- -
-
- -**object_type:** `typing.Optional[str]` — Filter comments by object type (trace, observation, session, prompt). - -
-
- -
-
- -**object_id:** `typing.Optional[str]` — Filter comments by object id. If objectType is not provided, an error will be thrown. - -
-
- -
-
- -**author_user_id:** `typing.Optional[str]` — Filter comments by author user id. - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.comments.get_by_id(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a comment by id -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.comments.get_by_id( - comment_id="commentId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**comment_id:** `str` — The unique langfuse identifier of a comment - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## DatasetItems -
client.dataset_items.create(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a dataset item -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import CreateDatasetItemRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.dataset_items.create( - request=CreateDatasetItemRequest( - dataset_name="datasetName", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `CreateDatasetItemRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.dataset_items.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a dataset item -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.dataset_items.get( - id="id", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.dataset_items.list(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get dataset items -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.dataset_items.list() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**dataset_name:** `typing.Optional[str]` - -
-
- -
-
- -**source_trace_id:** `typing.Optional[str]` - -
-
- -
-
- -**source_observation_id:** `typing.Optional[str]` - -
-
- -
-
- -**page:** `typing.Optional[int]` — page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — limit of items per page - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.dataset_items.delete(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete a dataset item and all its run items. This action is irreversible. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.dataset_items.delete( - id="id", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## DatasetRunItems -
client.dataset_run_items.create(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a dataset run item -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import CreateDatasetRunItemRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.dataset_run_items.create( - request=CreateDatasetRunItemRequest( - run_name="runName", - dataset_item_id="datasetItemId", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `CreateDatasetRunItemRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.dataset_run_items.list(...) -
-
- -#### 📝 Description - -
-
- -
-
- -List dataset run items -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.dataset_run_items.list( - dataset_id="datasetId", - run_name="runName", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**dataset_id:** `str` - -
-
- -
-
- -**run_name:** `str` - -
-
- -
-
- -**page:** `typing.Optional[int]` — page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — limit of items per page - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Datasets -
client.datasets.list(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get all datasets -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.datasets.list() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**page:** `typing.Optional[int]` — page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — limit of items per page - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.datasets.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a dataset -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.datasets.get( - dataset_name="datasetName", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**dataset_name:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.datasets.create(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a dataset -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import CreateDatasetRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.datasets.create( - request=CreateDatasetRequest( - name="name", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `CreateDatasetRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.datasets.get_run(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a dataset run and its items -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.datasets.get_run( - dataset_name="datasetName", - run_name="runName", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**dataset_name:** `str` - -
-
- -
-
- -**run_name:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.datasets.delete_run(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete a dataset run and all its run items. This action is irreversible. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.datasets.delete_run( - dataset_name="datasetName", - run_name="runName", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**dataset_name:** `str` - -
-
- -
-
- -**run_name:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.datasets.get_runs(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get dataset runs -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.datasets.get_runs( - dataset_name="datasetName", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**dataset_name:** `str` - -
-
- -
-
- -**page:** `typing.Optional[int]` — page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — limit of items per page - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Health -
client.health.health() -
-
- -#### 📝 Description - -
-
- -
-
- -Check health of API and database -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.health.health() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Ingestion -
client.ingestion.batch(...) -
-
- -#### 📝 Description - -
-
- -
-
- -**Legacy endpoint for batch ingestion for Langfuse Observability.** - --> Please use the OpenTelemetry endpoint (`/api/public/otel/v1/traces`). Learn more: https://langfuse.com/integrations/native/opentelemetry - -Within each batch, there can be multiple events. -Each event has a type, an id, a timestamp, metadata and a body. -Internally, we refer to this as the "event envelope" as it tells us something about the event but not the trace. -We use the event id within this envelope to deduplicate messages to avoid processing the same event twice, i.e. the event id should be unique per request. -The event.body.id is the ID of the actual trace and will be used for updates and will be visible within the Langfuse App. -I.e. if you want to update a trace, you'd use the same body id, but separate event IDs. - -Notes: -- Introduction to data model: https://langfuse.com/docs/observability/data-model -- Batch sizes are limited to 3.5 MB in total. You need to adjust the number of events per batch accordingly. -- The API does not return a 4xx status code for input errors. Instead, it responds with a 207 status code, which includes a list of the encountered errors. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import IngestionEvent_ScoreCreate, ScoreBody -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.ingestion.batch( - batch=[ - IngestionEvent_ScoreCreate( - id="abcdef-1234-5678-90ab", - timestamp="2022-01-01T00:00:00.000Z", - body=ScoreBody( - id="abcdef-1234-5678-90ab", - trace_id="1234-5678-90ab-cdef", - name="My Score", - value=0.9, - environment="default", - ), - ) - ], -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**batch:** `typing.Sequence[IngestionEvent]` — Batch of tracing events to be ingested. Discriminated by attribute `type`. - -
-
- -
-
- -**metadata:** `typing.Optional[typing.Any]` — Optional. Metadata field used by the Langfuse SDKs for debugging. - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## LlmConnections -
client.llm_connections.list(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get all LLM connections in a project -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.llm_connections.list() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**page:** `typing.Optional[int]` — page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — limit of items per page - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.llm_connections.upsert(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create or update an LLM connection. The connection is upserted based on its provider. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import LlmAdapter, UpsertLlmConnectionRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.llm_connections.upsert( - request=UpsertLlmConnectionRequest( - provider="provider", - adapter=LlmAdapter.ANTHROPIC, - secret_key="secretKey", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `UpsertLlmConnectionRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Media -
client.media.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a media record -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.media.get( - media_id="mediaId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**media_id:** `str` — The unique langfuse identifier of a media record - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.media.patch(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Patch a media record -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -import datetime - -from langfuse import PatchMediaBody -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.media.patch( - media_id="mediaId", - request=PatchMediaBody( - uploaded_at=datetime.datetime.fromisoformat( - "2024-01-15 09:30:00+00:00", - ), - upload_http_status=1, - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**media_id:** `str` — The unique langfuse identifier of a media record - -
-
- -
-
- -**request:** `PatchMediaBody` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.media.get_upload_url(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a presigned upload URL for a media record -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import GetMediaUploadUrlRequest, MediaContentType -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.media.get_upload_url( - request=GetMediaUploadUrlRequest( - trace_id="traceId", - content_type=MediaContentType.IMAGE_PNG, - content_length=1, - sha_256_hash="sha256Hash", - field="field", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `GetMediaUploadUrlRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Metrics -
client.metrics.metrics(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get metrics from the Langfuse project using a query object. - -For more details, see the [Metrics API documentation](https://langfuse.com/docs/metrics/features/metrics-api). -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.metrics.metrics( - query="query", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**query:** `str` - -JSON string containing the query parameters with the following structure: -```json -{ - "view": string, // Required. One of "traces", "observations", "scores-numeric", "scores-categorical" - "dimensions": [ // Optional. Default: [] - { - "field": string // Field to group by, e.g. "name", "userId", "sessionId" - } - ], - "metrics": [ // Required. At least one metric must be provided - { - "measure": string, // What to measure, e.g. "count", "latency", "value" - "aggregation": string // How to aggregate, e.g. "count", "sum", "avg", "p95", "histogram" - } - ], - "filters": [ // Optional. Default: [] - { - "column": string, // Column to filter on - "operator": string, // Operator, e.g. "=", ">", "<", "contains" - "value": any, // Value to compare against - "type": string, // Data type, e.g. "string", "number", "stringObject" - "key": string // Required only when filtering on metadata - } - ], - "timeDimension": { // Optional. Default: null. If provided, results will be grouped by time - "granularity": string // One of "minute", "hour", "day", "week", "month", "auto" - }, - "fromTimestamp": string, // Required. ISO datetime string for start of time range - "toTimestamp": string, // Required. ISO datetime string for end of time range - "orderBy": [ // Optional. Default: null - { - "field": string, // Field to order by - "direction": string // "asc" or "desc" - } - ], - "config": { // Optional. Query-specific configuration - "bins": number, // Optional. Number of bins for histogram (1-100), default: 10 - "row_limit": number // Optional. Row limit for results (1-1000) - } -} -``` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Models -
client.models.create(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a model -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import CreateModelRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.models.create( - request=CreateModelRequest( - model_name="modelName", - match_pattern="matchPattern", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `CreateModelRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.models.list(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get all models -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.models.list() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**page:** `typing.Optional[int]` — page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — limit of items per page - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.models.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a model -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.models.get( - id="id", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.models.delete(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete a model. Models managed by Langfuse cannot be deleted; however, you can override a managed model's definition by creating your own model with the same modelName. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.models.delete( - id="id", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Observations -
client.observations.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get an observation -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.observations.get( - observation_id="observationId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**observation_id:** `str` — The unique langfuse identifier of an observation, can be an event, span or generation - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.observations.get_many(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a list of observations -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.observations.get_many() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**page:** `typing.Optional[int]` — Page number, starts at 1. - -
-
- -
-
- -**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. - -
-
- -
-
- -**name:** `typing.Optional[str]` - -
-
- -
-
- -**user_id:** `typing.Optional[str]` - -
-
- -
-
- -**type:** `typing.Optional[str]` - -
-
- -
-
- -**trace_id:** `typing.Optional[str]` - -
-
- -
-
- -**level:** `typing.Optional[ObservationLevel]` — Optional filter for observations with a specific level (e.g. "DEBUG", "DEFAULT", "WARNING", "ERROR"). - -
-
- -
-
- -**parent_observation_id:** `typing.Optional[str]` - -
-
- -
-
- -**environment:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — Optional filter for observations where the environment is one of the provided values. - -
-
- -
-
- -**from_start_time:** `typing.Optional[dt.datetime]` — Retrieve only observations with a start_time on or after this datetime (ISO 8601). - -
-
- -
-
- -**to_start_time:** `typing.Optional[dt.datetime]` — Retrieve only observations with a start_time before this datetime (ISO 8601). - -
-
- -
-
- -**version:** `typing.Optional[str]` — Optional filter to only include observations with a certain version. - -
-
- -
-
- -**filter:** `typing.Optional[str]` - -JSON string containing an array of filter conditions. When provided, this takes precedence over query parameter filters (userId, name, type, level, environment, fromStartTime, ...). - -## Filter Structure -Each filter condition has the following structure: -```json -[ - { - "type": string, // Required. One of: "datetime", "string", "number", "stringOptions", "categoryOptions", "arrayOptions", "stringObject", "numberObject", "boolean", "null" - "column": string, // Required. Column to filter on (see available columns below) - "operator": string, // Required. Operator based on type: - // - datetime: ">", "<", ">=", "<=" - // - string: "=", "contains", "does not contain", "starts with", "ends with" - // - stringOptions: "any of", "none of" - // - categoryOptions: "any of", "none of" - // - arrayOptions: "any of", "none of", "all of" - // - number: "=", ">", "<", ">=", "<=" - // - stringObject: "=", "contains", "does not contain", "starts with", "ends with" - // - numberObject: "=", ">", "<", ">=", "<=" - // - boolean: "=", "<>" - // - null: "is null", "is not null" - "value": any, // Required (except for null type). Value to compare against. 
Type depends on filter type - "key": string // Required only for stringObject, numberObject, and categoryOptions types when filtering on nested fields like metadata - } -] -``` - -## Available Columns - -### Core Observation Fields -- `id` (string) - Observation ID -- `type` (string) - Observation type (SPAN, GENERATION, EVENT) -- `name` (string) - Observation name -- `traceId` (string) - Associated trace ID -- `startTime` (datetime) - Observation start time -- `endTime` (datetime) - Observation end time -- `environment` (string) - Environment tag -- `level` (string) - Log level (DEBUG, DEFAULT, WARNING, ERROR) -- `statusMessage` (string) - Status message -- `version` (string) - Version tag - -### Performance Metrics -- `latency` (number) - Latency in seconds (calculated: end_time - start_time) -- `timeToFirstToken` (number) - Time to first token in seconds -- `tokensPerSecond` (number) - Output tokens per second - -### Token Usage -- `inputTokens` (number) - Number of input tokens -- `outputTokens` (number) - Number of output tokens -- `totalTokens` (number) - Total tokens (alias: `tokens`) - -### Cost Metrics -- `inputCost` (number) - Input cost in USD -- `outputCost` (number) - Output cost in USD -- `totalCost` (number) - Total cost in USD - -### Model Information -- `model` (string) - Provided model name -- `promptName` (string) - Associated prompt name -- `promptVersion` (number) - Associated prompt version - -### Structured Data -- `metadata` (stringObject/numberObject/categoryOptions) - Metadata key-value pairs. Use `key` parameter to filter on specific metadata keys. 
- -### Scores (requires join with scores table) -- `scores_avg` (number) - Average of numeric scores (alias: `scores`) -- `score_categories` (categoryOptions) - Categorical score values - -### Associated Trace Fields (requires join with traces table) -- `userId` (string) - User ID from associated trace -- `traceName` (string) - Name from associated trace -- `traceEnvironment` (string) - Environment from associated trace -- `traceTags` (arrayOptions) - Tags from associated trace - -## Filter Examples -```json -[ - { - "type": "string", - "column": "type", - "operator": "=", - "value": "GENERATION" - }, - { - "type": "number", - "column": "latency", - "operator": ">=", - "value": 2.5 - }, - { - "type": "stringObject", - "column": "metadata", - "key": "environment", - "operator": "=", - "value": "production" - } -] -``` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Opentelemetry -
client.opentelemetry.export_traces(...) -
-
- -#### 📝 Description - -
-
- -
-
- -**OpenTelemetry Traces Ingestion Endpoint** - -This endpoint implements the OTLP/HTTP specification for trace ingestion, providing native OpenTelemetry integration for Langfuse Observability. - -**Supported Formats:** -- Binary Protobuf: `Content-Type: application/x-protobuf` -- JSON Protobuf: `Content-Type: application/json` -- Supports gzip compression via `Content-Encoding: gzip` header - -**Specification Compliance:** -- Conforms to [OTLP/HTTP Trace Export](https://opentelemetry.io/docs/specs/otlp/#otlphttp) -- Implements `ExportTraceServiceRequest` message format - -**Documentation:** -- Integration guide: https://langfuse.com/integrations/native/opentelemetry -- Data model: https://langfuse.com/docs/observability/data-model -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import ( - OtelAttribute, - OtelAttributeValue, - OtelResource, - OtelResourceSpan, - OtelScope, - OtelScopeSpan, - OtelSpan, -) -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.opentelemetry.export_traces( - resource_spans=[ - OtelResourceSpan( - resource=OtelResource( - attributes=[ - OtelAttribute( - key="service.name", - value=OtelAttributeValue( - string_value="my-service", - ), - ), - OtelAttribute( - key="service.version", - value=OtelAttributeValue( - string_value="1.0.0", - ), - ), - ], - ), - scope_spans=[ - OtelScopeSpan( - scope=OtelScope( - name="langfuse-sdk", - version="2.60.3", - ), - spans=[ - OtelSpan( - trace_id="0123456789abcdef0123456789abcdef", - span_id="0123456789abcdef", - name="my-operation", - kind=1, - start_time_unix_nano="1747872000000000000", - end_time_unix_nano="1747872001000000000", - attributes=[ - OtelAttribute( - key="langfuse.observation.type", - value=OtelAttributeValue( - string_value="generation", - ), - ) - ], - status={}, - ) - ], - ) - ], - ) - ], -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**resource_spans:** `typing.Sequence[OtelResourceSpan]` — Array of resource spans containing trace data as defined in the OTLP specification - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Organizations -
client.organizations.get_organization_memberships() -
-
- -#### 📝 Description - -
-
- -
-
- -Get all memberships for the organization associated with the API key (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.organizations.get_organization_memberships() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.organizations.update_organization_membership(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create or update a membership for the organization associated with the API key (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import MembershipRequest, MembershipRole -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.organizations.update_organization_membership( - request=MembershipRequest( - user_id="userId", - role=MembershipRole.OWNER, - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `MembershipRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.organizations.delete_organization_membership(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete a membership from the organization associated with the API key (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import DeleteMembershipRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.organizations.delete_organization_membership( - request=DeleteMembershipRequest( - user_id="userId", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `DeleteMembershipRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.organizations.get_project_memberships(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get all memberships for a specific project (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.organizations.get_project_memberships( - project_id="projectId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**project_id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.organizations.update_project_membership(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create or update a membership for a specific project (requires organization-scoped API key). The user must already be a member of the organization. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import MembershipRequest, MembershipRole -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.organizations.update_project_membership( - project_id="projectId", - request=MembershipRequest( - user_id="userId", - role=MembershipRole.OWNER, - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**project_id:** `str` - -
-
- -
-
- -**request:** `MembershipRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.organizations.delete_project_membership(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete a membership from a specific project (requires organization-scoped API key). The user must be a member of the organization. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import DeleteMembershipRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.organizations.delete_project_membership( - project_id="projectId", - request=DeleteMembershipRequest( - user_id="userId", - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**project_id:** `str` - -
-
- -
-
- -**request:** `DeleteMembershipRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.organizations.get_organization_projects() -
-
- -#### 📝 Description - -
-
- -
-
- -Get all projects for the organization associated with the API key (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.organizations.get_organization_projects() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.organizations.get_organization_api_keys() -
-
- -#### 📝 Description - -
-
- -
-
- -Get all API keys for the organization associated with the API key (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.organizations.get_organization_api_keys() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Projects -
client.projects.get() -
-
- -#### 📝 Description - -
-
- -
-
- -Get Project associated with API key -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.projects.get() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.projects.create(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a new project (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.projects.create( - name="name", - retention=1, -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**name:** `str` - -
-
- -
-
- -**retention:** `int` — Number of days to retain data. Must be 0 or at least 3 days. Requires data-retention entitlement for non-zero values. Optional. - -
-
- -
-
- -**metadata:** `typing.Optional[typing.Dict[str, typing.Any]]` — Optional metadata for the project - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.projects.update(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Update a project by ID (requires organization-scoped API key). -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.projects.update( - project_id="projectId", - name="name", - retention=1, -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**project_id:** `str` - -
-
- -
-
- -**name:** `str` - -
-
- -
-
- -**retention:** `int` — Number of days to retain data. Must be 0 or at least 3 days. Requires data-retention entitlement for non-zero values. Optional. - -
-
- -
-
- -**metadata:** `typing.Optional[typing.Dict[str, typing.Any]]` — Optional metadata for the project - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.projects.delete(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete a project by ID (requires organization-scoped API key). Project deletion is processed asynchronously. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.projects.delete( - project_id="projectId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**project_id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.projects.get_api_keys(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get all API keys for a project (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.projects.get_api_keys( - project_id="projectId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**project_id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.projects.create_api_key(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a new API key for a project (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.projects.create_api_key( - project_id="projectId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**project_id:** `str` - -
-
- -
-
- -**note:** `typing.Optional[str]` — Optional note for the API key - -
-
- -
-
- -**public_key:** `typing.Optional[str]` — Optional predefined public key. Must start with 'pk-lf-'. If provided, secretKey must also be provided. - -
-
- -
-
- -**secret_key:** `typing.Optional[str]` — Optional predefined secret key. Must start with 'sk-lf-'. If provided, publicKey must also be provided. - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.projects.delete_api_key(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete an API key for a project (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.projects.delete_api_key( - project_id="projectId", - api_key_id="apiKeyId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**project_id:** `str` - -
-
- -
-
- -**api_key_id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## PromptVersion -
client.prompt_version.update(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Update labels for a specific prompt version -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.prompt_version.update( - name="name", - version=1, - new_labels=["newLabels", "newLabels"], -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**name:** `str` — The name of the prompt - -
-
- -
-
- -**version:** `int` — Version of the prompt to update - -
-
- -
-
- -**new_labels:** `typing.Sequence[str]` — New labels for the prompt version. Labels are unique across versions. The "latest" label is reserved and managed by Langfuse. - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Prompts -
client.prompts.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a prompt -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.prompts.get( - prompt_name="promptName", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**prompt_name:** `str` — The name of the prompt - -
-
- -
-
- -**version:** `typing.Optional[int]` — Version of the prompt to be retrieved. - -
-
- -
-
- -**label:** `typing.Optional[str]` — Label of the prompt to be retrieved. Defaults to "production" if no label or version is set. - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.prompts.list(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a list of prompt names with versions and labels -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.prompts.list() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**name:** `typing.Optional[str]` - -
-
- -
-
- -**label:** `typing.Optional[str]` - -
-
- -
-
- -**tag:** `typing.Optional[str]` - -
-
- -
-
- -**page:** `typing.Optional[int]` — page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — limit of items per page - -
-
- -
-
- -**from_updated_at:** `typing.Optional[dt.datetime]` — Optional filter to only include prompt versions created/updated on or after a certain datetime (ISO 8601) - -
-
- -
-
- -**to_updated_at:** `typing.Optional[dt.datetime]` — Optional filter to only include prompt versions created/updated before a certain datetime (ISO 8601) - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.prompts.create(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a new version for the prompt with the given `name` -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import ( - ChatMessageWithPlaceholders_Chatmessage, - CreatePromptRequest_Chat, -) -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.prompts.create( - request=CreatePromptRequest_Chat( - name="name", - prompt=[ - ChatMessageWithPlaceholders_Chatmessage( - role="role", - content="content", - ), - ChatMessageWithPlaceholders_Chatmessage( - role="role", - content="content", - ), - ], - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `CreatePromptRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Scim -
client.scim.get_service_provider_config() -
-
- -#### 📝 Description - -
-
- -
-
- -Get SCIM Service Provider Configuration (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.scim.get_service_provider_config() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.scim.get_resource_types() -
-
- -#### 📝 Description - -
-
- -
-
- -Get SCIM Resource Types (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.scim.get_resource_types() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.scim.get_schemas() -
-
- -#### 📝 Description - -
-
- -
-
- -Get SCIM Schemas (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.scim.get_schemas() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.scim.list_users(...) -
-
- -#### 📝 Description - -
-
- -
-
- -List users in the organization (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.scim.list_users() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**filter:** `typing.Optional[str]` — Filter expression (e.g. userName eq "value") - -
-
- -
-
- -**start_index:** `typing.Optional[int]` — 1-based index of the first result to return (default 1) - -
-
- -
-
- -**count:** `typing.Optional[int]` — Maximum number of results to return (default 100) - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.scim.create_user(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a new user in the organization (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import ScimName -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.scim.create_user( - user_name="userName", - name=ScimName(), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**user_name:** `str` — User's email address (required) - -
-
- -
-
- -**name:** `ScimName` — User's name information - -
-
- -
-
- -**emails:** `typing.Optional[typing.Sequence[ScimEmail]]` — User's email addresses - -
-
- -
-
- -**active:** `typing.Optional[bool]` — Whether the user is active - -
-
- -
-
- -**password:** `typing.Optional[str]` — Initial password for the user - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.scim.get_user(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a specific user by ID (requires organization-scoped API key) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.scim.get_user( - user_id="userId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**user_id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.scim.delete_user(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Remove a user from the organization (requires organization-scoped API key). Note that this only removes the user from the organization but does not delete the user entity itself. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.scim.delete_user( - user_id="userId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**user_id:** `str` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## ScoreConfigs -
client.score_configs.create(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a score configuration (config). Score configs are used to define the structure of scores -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import CreateScoreConfigRequest, ScoreDataType -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.score_configs.create( - request=CreateScoreConfigRequest( - name="name", - data_type=ScoreDataType.NUMERIC, - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `CreateScoreConfigRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.score_configs.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get all score configs -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.score_configs.get() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**page:** `typing.Optional[int]` — Page number, starts at 1. - -
-
- -
-
- -**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.score_configs.get_by_id(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a score config -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.score_configs.get_by_id( - config_id="configId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**config_id:** `str` — The unique langfuse identifier of a score config - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.score_configs.update(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Update a score config -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import UpdateScoreConfigRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.score_configs.update( - config_id="configId", - request=UpdateScoreConfigRequest(), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**config_id:** `str` — The unique langfuse identifier of a score config - -
-
- -
-
- -**request:** `UpdateScoreConfigRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## ScoreV2 -
client.score_v_2.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a list of scores (supports both trace and session scores) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.score_v_2.get() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**page:** `typing.Optional[int]` — Page number, starts at 1. - -
-
- -
-
- -**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. - -
-
- -
-
- -**user_id:** `typing.Optional[str]` — Retrieve only scores with this userId associated to the trace. - -
-
- -
-
- -**name:** `typing.Optional[str]` — Retrieve only scores with this name. - -
-
- -
-
- -**from_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include scores created on or after a certain datetime (ISO 8601) - -
-
- -
-
- -**to_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include scores created before a certain datetime (ISO 8601) - -
-
- -
-
- -**environment:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — Optional filter for scores where the environment is one of the provided values. - -
-
- -
-
- -**source:** `typing.Optional[ScoreSource]` — Retrieve only scores from a specific source. - -
-
- -
-
- -**operator:** `typing.Optional[str]` — Retrieve only scores with value. - -
-
- -
-
- -**value:** `typing.Optional[float]` — Retrieve only scores with value. - -
-
- -
-
- -**score_ids:** `typing.Optional[str]` — Comma-separated list of score IDs to limit the results to. - -
-
- -
-
- -**config_id:** `typing.Optional[str]` — Retrieve only scores with a specific configId. - -
-
- -
-
- -**session_id:** `typing.Optional[str]` — Retrieve only scores with a specific sessionId. - -
-
- -
-
- -**queue_id:** `typing.Optional[str]` — Retrieve only scores with a specific annotation queueId. - -
-
- -
-
- -**data_type:** `typing.Optional[ScoreDataType]` — Retrieve only scores with a specific dataType. - -
-
- -
-
- -**trace_tags:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — Only scores linked to traces that include all of these tags will be returned. - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.score_v_2.get_by_id(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a score (supports both trace and session scores) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.score_v_2.get_by_id( - score_id="scoreId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**score_id:** `str` — The unique langfuse identifier of a score - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Score -
client.score.create(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Create a score (supports both trace and session scores) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse import CreateScoreRequest -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.score.create( - request=CreateScoreRequest( - name="name", - value=1.1, - ), -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**request:** `CreateScoreRequest` - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.score.delete(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete a score (supports both trace and session scores) -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.score.delete( - score_id="scoreId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**score_id:** `str` — The unique langfuse identifier of a score - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Sessions -
client.sessions.list(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get sessions -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.sessions.list() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**page:** `typing.Optional[int]` — Page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. - -
-
- -
-
- -**from_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include sessions created on or after a certain datetime (ISO 8601) - -
-
- -
-
- -**to_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include sessions created before a certain datetime (ISO 8601) - -
-
- -
-
- -**environment:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — Optional filter for sessions where the environment is one of the provided values. - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.sessions.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a session. Please note that `traces` on this endpoint are not paginated, if you plan to fetch large sessions, consider `GET /api/public/traces?sessionId=` -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.sessions.get( - session_id="sessionId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**session_id:** `str` — The unique id of a session - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -## Trace -
client.trace.get(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get a specific trace -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.trace.get( - trace_id="traceId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**trace_id:** `str` — The unique langfuse identifier of a trace - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.trace.delete(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete a specific trace -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.trace.delete( - trace_id="traceId", -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**trace_id:** `str` — The unique langfuse identifier of the trace to delete - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.trace.list(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Get list of traces -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.trace.list() - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**page:** `typing.Optional[int]` — Page number, starts at 1 - -
-
- -
-
- -**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. - -
-
- -
-
- -**user_id:** `typing.Optional[str]` - -
-
- -
-
- -**name:** `typing.Optional[str]` - -
-
- -
-
- -**session_id:** `typing.Optional[str]` - -
-
- -
-
- -**from_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include traces with a trace.timestamp on or after a certain datetime (ISO 8601) - -
-
- -
-
- -**to_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include traces with a trace.timestamp before a certain datetime (ISO 8601) - -
-
- -
-
- -**order_by:** `typing.Optional[str]` — Format of the string [field].[asc/desc]. Fields: id, timestamp, name, userId, release, version, public, bookmarked, sessionId. Example: timestamp.asc - -
-
- -
-
- -**tags:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — Only traces that include all of these tags will be returned. - -
-
- -
-
- -**version:** `typing.Optional[str]` — Optional filter to only include traces with a certain version. - -
-
- -
-
- -**release:** `typing.Optional[str]` — Optional filter to only include traces with a certain release. - -
-
- -
-
- -**environment:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — Optional filter for traces where the environment is one of the provided values. - -
-
- -
-
- -**fields:** `typing.Optional[str]` — Comma-separated list of fields to include in the response. Available field groups: 'core' (always included), 'io' (input, output, metadata), 'scores', 'observations', 'metrics'. If not specified, all fields are returned. Example: 'core,scores,metrics'. Note: Excluded 'observations' or 'scores' fields return empty arrays; excluded 'metrics' returns -1 for 'totalCost' and 'latency'. - -
-
- -
-
- -**filter:** `typing.Optional[str]` - -JSON string containing an array of filter conditions. When provided, this takes precedence over query parameter filters (userId, name, sessionId, tags, version, release, environment, fromTimestamp, toTimestamp). - -## Filter Structure -Each filter condition has the following structure: -```json -[ - { - "type": string, // Required. One of: "datetime", "string", "number", "stringOptions", "categoryOptions", "arrayOptions", "stringObject", "numberObject", "boolean", "null" - "column": string, // Required. Column to filter on (see available columns below) - "operator": string, // Required. Operator based on type: - // - datetime: ">", "<", ">=", "<=" - // - string: "=", "contains", "does not contain", "starts with", "ends with" - // - stringOptions: "any of", "none of" - // - categoryOptions: "any of", "none of" - // - arrayOptions: "any of", "none of", "all of" - // - number: "=", ">", "<", ">=", "<=" - // - stringObject: "=", "contains", "does not contain", "starts with", "ends with" - // - numberObject: "=", ">", "<", ">=", "<=" - // - boolean: "=", "<>" - // - null: "is null", "is not null" - "value": any, // Required (except for null type). Value to compare against. Type depends on filter type - "key": string // Required only for stringObject, numberObject, and categoryOptions types when filtering on nested fields like metadata - } -] -``` - -## Available Columns - -### Core Trace Fields -- `id` (string) - Trace ID -- `name` (string) - Trace name -- `timestamp` (datetime) - Trace timestamp -- `userId` (string) - User ID -- `sessionId` (string) - Session ID -- `environment` (string) - Environment tag -- `version` (string) - Version tag -- `release` (string) - Release tag -- `tags` (arrayOptions) - Array of tags -- `bookmarked` (boolean) - Bookmark status - -### Structured Data -- `metadata` (stringObject/numberObject/categoryOptions) - Metadata key-value pairs. Use `key` parameter to filter on specific metadata keys. 
- -### Aggregated Metrics (from observations) -These metrics are aggregated from all observations within the trace: -- `latency` (number) - Latency in seconds (time from first observation start to last observation end) -- `inputTokens` (number) - Total input tokens across all observations -- `outputTokens` (number) - Total output tokens across all observations -- `totalTokens` (number) - Total tokens (alias: `tokens`) -- `inputCost` (number) - Total input cost in USD -- `outputCost` (number) - Total output cost in USD -- `totalCost` (number) - Total cost in USD - -### Observation Level Aggregations -These fields aggregate observation levels within the trace: -- `level` (string) - Highest severity level (ERROR > WARNING > DEFAULT > DEBUG) -- `warningCount` (number) - Count of WARNING level observations -- `errorCount` (number) - Count of ERROR level observations -- `defaultCount` (number) - Count of DEFAULT level observations -- `debugCount` (number) - Count of DEBUG level observations - -### Scores (requires join with scores table) -- `scores_avg` (number) - Average of numeric scores (alias: `scores`) -- `score_categories` (categoryOptions) - Categorical score values - -## Filter Examples -```json -[ - { - "type": "datetime", - "column": "timestamp", - "operator": ">=", - "value": "2024-01-01T00:00:00Z" - }, - { - "type": "string", - "column": "userId", - "operator": "=", - "value": "user-123" - }, - { - "type": "number", - "column": "totalCost", - "operator": ">=", - "value": 0.01 - }, - { - "type": "arrayOptions", - "column": "tags", - "operator": "all of", - "value": ["production", "critical"] - }, - { - "type": "stringObject", - "column": "metadata", - "key": "customer_tier", - "operator": "=", - "value": "enterprise" - } -] -``` - -## Performance Notes -- Filtering on `userId`, `sessionId`, or `metadata` may enable skip indexes for better query performance -- Score filters require a join with the scores table and may impact query performance - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- -
client.trace.delete_multiple(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Delete multiple traces -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from langfuse.client import FernLangfuse - -client = FernLangfuse( - x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", - x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION", - x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY", - username="YOUR_USERNAME", - password="YOUR_PASSWORD", - base_url="https://yourhost.com/path/to/api", -) -client.trace.delete_multiple( - trace_ids=["traceIds", "traceIds"], -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
- -
-
- -**trace_ids:** `typing.Sequence[str]` — List of trace IDs to delete - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
-
-
- - -
-
-
- diff --git a/langfuse/api/resources/__init__.py b/langfuse/api/resources/__init__.py index b3a6cc31a..56b4b5554 100644 --- a/langfuse/api/resources/__init__.py +++ b/langfuse/api/resources/__init__.py @@ -1,271 +1,544 @@ # This file was auto-generated by Fern from our API Definition. -from . import ( - annotation_queues, - blob_storage_integrations, - comments, - commons, - dataset_items, - dataset_run_items, - datasets, - health, - ingestion, - llm_connections, - media, - metrics, - models, - observations, - opentelemetry, - organizations, - projects, - prompt_version, - prompts, - scim, - score, - score_configs, - score_v_2, - sessions, - trace, - utils, -) -from .annotation_queues import ( - AnnotationQueue, - AnnotationQueueAssignmentRequest, - AnnotationQueueItem, - AnnotationQueueObjectType, - AnnotationQueueStatus, - CreateAnnotationQueueAssignmentResponse, - CreateAnnotationQueueItemRequest, - CreateAnnotationQueueRequest, - DeleteAnnotationQueueAssignmentResponse, - DeleteAnnotationQueueItemResponse, - PaginatedAnnotationQueueItems, - PaginatedAnnotationQueues, - UpdateAnnotationQueueItemRequest, -) -from .blob_storage_integrations import ( - BlobStorageExportFrequency, - BlobStorageExportMode, - BlobStorageIntegrationDeletionResponse, - BlobStorageIntegrationFileType, - BlobStorageIntegrationResponse, - BlobStorageIntegrationType, - BlobStorageIntegrationsResponse, - CreateBlobStorageIntegrationRequest, -) -from .comments import CreateCommentRequest, CreateCommentResponse, GetCommentsResponse -from .commons import ( - AccessDeniedError, - BaseScore, - BaseScoreV1, - BooleanScore, - BooleanScoreV1, - CategoricalScore, - CategoricalScoreV1, - Comment, - CommentObjectType, - ConfigCategory, - CreateScoreValue, - Dataset, - DatasetItem, - DatasetRun, - DatasetRunItem, - DatasetRunWithItems, - DatasetStatus, - Error, - MapValue, - MethodNotAllowedError, - Model, - ModelPrice, - ModelUsageUnit, - NotFoundError, - NumericScore, - NumericScoreV1, - Observation, 
- ObservationLevel, - ObservationsView, - Score, - ScoreConfig, - ScoreDataType, - ScoreSource, - ScoreV1, - ScoreV1_Boolean, - ScoreV1_Categorical, - ScoreV1_Numeric, - Score_Boolean, - Score_Categorical, - Score_Numeric, - Session, - SessionWithTraces, - Trace, - TraceWithDetails, - TraceWithFullDetails, - UnauthorizedError, - Usage, -) -from .dataset_items import ( - CreateDatasetItemRequest, - DeleteDatasetItemResponse, - PaginatedDatasetItems, -) -from .dataset_run_items import CreateDatasetRunItemRequest, PaginatedDatasetRunItems -from .datasets import ( - CreateDatasetRequest, - DeleteDatasetRunResponse, - PaginatedDatasetRuns, - PaginatedDatasets, -) -from .health import HealthResponse, ServiceUnavailableError -from .ingestion import ( - BaseEvent, - CreateEventBody, - CreateEventEvent, - CreateGenerationBody, - CreateGenerationEvent, - CreateObservationEvent, - CreateSpanBody, - CreateSpanEvent, - IngestionError, - IngestionEvent, - IngestionEvent_EventCreate, - IngestionEvent_GenerationCreate, - IngestionEvent_GenerationUpdate, - IngestionEvent_ObservationCreate, - IngestionEvent_ObservationUpdate, - IngestionEvent_ScoreCreate, - IngestionEvent_SdkLog, - IngestionEvent_SpanCreate, - IngestionEvent_SpanUpdate, - IngestionEvent_TraceCreate, - IngestionResponse, - IngestionSuccess, - IngestionUsage, - ObservationBody, - ObservationType, - OpenAiCompletionUsageSchema, - OpenAiResponseUsageSchema, - OpenAiUsage, - OptionalObservationBody, - ScoreBody, - ScoreEvent, - SdkLogBody, - SdkLogEvent, - TraceBody, - TraceEvent, - UpdateEventBody, - UpdateGenerationBody, - UpdateGenerationEvent, - UpdateObservationEvent, - UpdateSpanBody, - UpdateSpanEvent, - UsageDetails, -) -from .llm_connections import ( - LlmAdapter, - LlmConnection, - PaginatedLlmConnections, - UpsertLlmConnectionRequest, -) -from .media import ( - GetMediaResponse, - GetMediaUploadUrlRequest, - GetMediaUploadUrlResponse, - MediaContentType, - PatchMediaBody, -) -from .metrics import 
MetricsResponse -from .models import CreateModelRequest, PaginatedModels -from .observations import Observations, ObservationsViews -from .opentelemetry import ( - OtelAttribute, - OtelAttributeValue, - OtelResource, - OtelResourceSpan, - OtelScope, - OtelScopeSpan, - OtelSpan, - OtelTraceResponse, -) -from .organizations import ( - DeleteMembershipRequest, - MembershipDeletionResponse, - MembershipRequest, - MembershipResponse, - MembershipRole, - MembershipsResponse, - OrganizationApiKey, - OrganizationApiKeysResponse, - OrganizationProject, - OrganizationProjectsResponse, -) -from .projects import ( - ApiKeyDeletionResponse, - ApiKeyList, - ApiKeyResponse, - ApiKeySummary, - Project, - ProjectDeletionResponse, - Projects, -) -from .prompts import ( - BasePrompt, - ChatMessage, - ChatMessageWithPlaceholders, - ChatMessageWithPlaceholders_Chatmessage, - ChatMessageWithPlaceholders_Placeholder, - ChatPrompt, - CreateChatPromptRequest, - CreatePromptRequest, - CreatePromptRequest_Chat, - CreatePromptRequest_Text, - CreateTextPromptRequest, - PlaceholderMessage, - Prompt, - PromptMeta, - PromptMetaListResponse, - PromptType, - Prompt_Chat, - Prompt_Text, - TextPrompt, -) -from .scim import ( - AuthenticationScheme, - BulkConfig, - EmptyResponse, - FilterConfig, - ResourceMeta, - ResourceType, - ResourceTypesResponse, - SchemaExtension, - SchemaResource, - SchemasResponse, - ScimEmail, - ScimFeatureSupport, - ScimName, - ScimUser, - ScimUsersListResponse, - ServiceProviderConfig, - UserMeta, -) -from .score import CreateScoreRequest, CreateScoreResponse -from .score_configs import ( - CreateScoreConfigRequest, - ScoreConfigs, - UpdateScoreConfigRequest, -) -from .score_v_2 import ( - GetScoresResponse, - GetScoresResponseData, - GetScoresResponseDataBoolean, - GetScoresResponseDataCategorical, - GetScoresResponseDataNumeric, - GetScoresResponseData_Boolean, - GetScoresResponseData_Categorical, - GetScoresResponseData_Numeric, - GetScoresResponseTraceData, -) -from 
.sessions import PaginatedSessions -from .trace import DeleteTraceResponse, Sort, Traces +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from . import ( + annotation_queues, + blob_storage_integrations, + comments, + commons, + dataset_items, + dataset_run_items, + datasets, + health, + ingestion, + llm_connections, + media, + metrics, + models, + observations, + opentelemetry, + organizations, + projects, + prompt_version, + prompts, + scim, + score, + score_configs, + score_v_2, + sessions, + trace, + utils, + ) + from .annotation_queues import ( + AnnotationQueue, + AnnotationQueueAssignmentRequest, + AnnotationQueueItem, + AnnotationQueueObjectType, + AnnotationQueueStatus, + CreateAnnotationQueueAssignmentResponse, + CreateAnnotationQueueItemRequest, + CreateAnnotationQueueRequest, + DeleteAnnotationQueueAssignmentResponse, + DeleteAnnotationQueueItemResponse, + PaginatedAnnotationQueueItems, + PaginatedAnnotationQueues, + UpdateAnnotationQueueItemRequest, + ) + from .blob_storage_integrations import ( + BlobStorageExportFrequency, + BlobStorageExportMode, + BlobStorageIntegrationDeletionResponse, + BlobStorageIntegrationFileType, + BlobStorageIntegrationResponse, + BlobStorageIntegrationType, + BlobStorageIntegrationsResponse, + CreateBlobStorageIntegrationRequest, + ) + from .comments import ( + CreateCommentRequest, + CreateCommentResponse, + GetCommentsResponse, + ) + from .commons import ( + AccessDeniedError, + BaseScore, + BaseScoreV1, + BooleanScore, + BooleanScoreV1, + CategoricalScore, + CategoricalScoreV1, + Comment, + CommentObjectType, + ConfigCategory, + CreateScoreValue, + Dataset, + DatasetItem, + DatasetRun, + DatasetRunItem, + DatasetRunWithItems, + DatasetStatus, + Error, + MapValue, + MethodNotAllowedError, + Model, + ModelPrice, + ModelUsageUnit, + NotFoundError, + NumericScore, + NumericScoreV1, + Observation, + ObservationLevel, + ObservationsView, + Score, + ScoreConfig, + 
ScoreDataType, + ScoreSource, + ScoreV1, + Score_Boolean, + Score_Categorical, + Score_Numeric, + Session, + SessionWithTraces, + Trace, + TraceWithDetails, + TraceWithFullDetails, + UnauthorizedError, + Usage, + ) + from .dataset_items import ( + CreateDatasetItemRequest, + DeleteDatasetItemResponse, + PaginatedDatasetItems, + ) + from .dataset_run_items import CreateDatasetRunItemRequest, PaginatedDatasetRunItems + from .datasets import ( + CreateDatasetRequest, + DeleteDatasetRunResponse, + PaginatedDatasetRuns, + PaginatedDatasets, + ) + from .health import HealthResponse, ServiceUnavailableError + from .ingestion import ( + BaseEvent, + CreateEventBody, + CreateEventEvent, + CreateGenerationBody, + CreateGenerationEvent, + CreateObservationEvent, + CreateSpanBody, + CreateSpanEvent, + IngestionError, + IngestionEvent, + IngestionEvent_EventCreate, + IngestionEvent_GenerationCreate, + IngestionEvent_GenerationUpdate, + IngestionEvent_ObservationCreate, + IngestionEvent_ObservationUpdate, + IngestionEvent_ScoreCreate, + IngestionEvent_SdkLog, + IngestionEvent_SpanCreate, + IngestionEvent_SpanUpdate, + IngestionEvent_TraceCreate, + IngestionResponse, + IngestionSuccess, + IngestionUsage, + ObservationBody, + ObservationType, + OpenAiCompletionUsageSchema, + OpenAiResponseUsageSchema, + OpenAiUsage, + OptionalObservationBody, + ScoreBody, + ScoreEvent, + SdkLogBody, + SdkLogEvent, + TraceBody, + TraceEvent, + UpdateEventBody, + UpdateGenerationBody, + UpdateGenerationEvent, + UpdateObservationEvent, + UpdateSpanBody, + UpdateSpanEvent, + UsageDetails, + ) + from .llm_connections import ( + LlmAdapter, + LlmConnection, + PaginatedLlmConnections, + UpsertLlmConnectionRequest, + ) + from .media import ( + GetMediaResponse, + GetMediaUploadUrlRequest, + GetMediaUploadUrlResponse, + MediaContentType, + PatchMediaBody, + ) + from .metrics import MetricsResponse + from .models import CreateModelRequest, PaginatedModels + from .observations import Observations, 
ObservationsViews + from .opentelemetry import ( + OtelAttribute, + OtelAttributeValue, + OtelResource, + OtelResourceSpan, + OtelScope, + OtelScopeSpan, + OtelSpan, + OtelTraceResponse, + ) + from .organizations import ( + DeleteMembershipRequest, + MembershipDeletionResponse, + MembershipRequest, + MembershipResponse, + MembershipRole, + MembershipsResponse, + OrganizationApiKey, + OrganizationApiKeysResponse, + OrganizationProject, + OrganizationProjectsResponse, + ) + from .projects import ( + ApiKeyDeletionResponse, + ApiKeyList, + ApiKeyResponse, + ApiKeySummary, + Project, + ProjectDeletionResponse, + Projects, + ) + from .prompts import ( + BasePrompt, + ChatMessage, + ChatMessageWithPlaceholders, + ChatPrompt, + CreateChatPromptRequest, + CreatePromptRequest, + CreatePromptRequest_Chat, + CreatePromptRequest_Text, + CreateTextPromptRequest, + PlaceholderMessage, + Prompt, + PromptMeta, + PromptMetaListResponse, + PromptType, + Prompt_Chat, + Prompt_Text, + TextPrompt, + ) + from .scim import ( + AuthenticationScheme, + BulkConfig, + EmptyResponse, + FilterConfig, + ResourceMeta, + ResourceType, + ResourceTypesResponse, + SchemaExtension, + SchemaResource, + SchemasResponse, + ScimEmail, + ScimFeatureSupport, + ScimName, + ScimUser, + ScimUsersListResponse, + ServiceProviderConfig, + UserMeta, + ) + from .score import CreateScoreRequest, CreateScoreResponse + from .score_configs import ( + CreateScoreConfigRequest, + ScoreConfigs, + UpdateScoreConfigRequest, + ) + from .score_v_2 import ( + GetScoresResponse, + GetScoresResponseData, + GetScoresResponseDataBoolean, + GetScoresResponseDataCategorical, + GetScoresResponseDataNumeric, + GetScoresResponseData_Boolean, + GetScoresResponseData_Categorical, + GetScoresResponseData_Numeric, + GetScoresResponseTraceData, + ) + from .sessions import PaginatedSessions + from .trace import DeleteTraceResponse, Sort, Traces +_dynamic_imports: typing.Dict[str, str] = { + "AccessDeniedError": ".commons", + 
"AnnotationQueue": ".annotation_queues", + "AnnotationQueueAssignmentRequest": ".annotation_queues", + "AnnotationQueueItem": ".annotation_queues", + "AnnotationQueueObjectType": ".annotation_queues", + "AnnotationQueueStatus": ".annotation_queues", + "ApiKeyDeletionResponse": ".projects", + "ApiKeyList": ".projects", + "ApiKeyResponse": ".projects", + "ApiKeySummary": ".projects", + "AuthenticationScheme": ".scim", + "BaseEvent": ".ingestion", + "BasePrompt": ".prompts", + "BaseScore": ".commons", + "BaseScoreV1": ".commons", + "BlobStorageExportFrequency": ".blob_storage_integrations", + "BlobStorageExportMode": ".blob_storage_integrations", + "BlobStorageIntegrationDeletionResponse": ".blob_storage_integrations", + "BlobStorageIntegrationFileType": ".blob_storage_integrations", + "BlobStorageIntegrationResponse": ".blob_storage_integrations", + "BlobStorageIntegrationType": ".blob_storage_integrations", + "BlobStorageIntegrationsResponse": ".blob_storage_integrations", + "BooleanScore": ".commons", + "BooleanScoreV1": ".commons", + "BulkConfig": ".scim", + "CategoricalScore": ".commons", + "CategoricalScoreV1": ".commons", + "ChatMessage": ".prompts", + "ChatMessageWithPlaceholders": ".prompts", + "ChatPrompt": ".prompts", + "Comment": ".commons", + "CommentObjectType": ".commons", + "ConfigCategory": ".commons", + "CreateAnnotationQueueAssignmentResponse": ".annotation_queues", + "CreateAnnotationQueueItemRequest": ".annotation_queues", + "CreateAnnotationQueueRequest": ".annotation_queues", + "CreateBlobStorageIntegrationRequest": ".blob_storage_integrations", + "CreateChatPromptRequest": ".prompts", + "CreateCommentRequest": ".comments", + "CreateCommentResponse": ".comments", + "CreateDatasetItemRequest": ".dataset_items", + "CreateDatasetRequest": ".datasets", + "CreateDatasetRunItemRequest": ".dataset_run_items", + "CreateEventBody": ".ingestion", + "CreateEventEvent": ".ingestion", + "CreateGenerationBody": ".ingestion", + "CreateGenerationEvent": 
".ingestion", + "CreateModelRequest": ".models", + "CreateObservationEvent": ".ingestion", + "CreatePromptRequest": ".prompts", + "CreatePromptRequest_Chat": ".prompts", + "CreatePromptRequest_Text": ".prompts", + "CreateScoreConfigRequest": ".score_configs", + "CreateScoreRequest": ".score", + "CreateScoreResponse": ".score", + "CreateScoreValue": ".commons", + "CreateSpanBody": ".ingestion", + "CreateSpanEvent": ".ingestion", + "CreateTextPromptRequest": ".prompts", + "Dataset": ".commons", + "DatasetItem": ".commons", + "DatasetRun": ".commons", + "DatasetRunItem": ".commons", + "DatasetRunWithItems": ".commons", + "DatasetStatus": ".commons", + "DeleteAnnotationQueueAssignmentResponse": ".annotation_queues", + "DeleteAnnotationQueueItemResponse": ".annotation_queues", + "DeleteDatasetItemResponse": ".dataset_items", + "DeleteDatasetRunResponse": ".datasets", + "DeleteMembershipRequest": ".organizations", + "DeleteTraceResponse": ".trace", + "EmptyResponse": ".scim", + "Error": ".commons", + "FilterConfig": ".scim", + "GetCommentsResponse": ".comments", + "GetMediaResponse": ".media", + "GetMediaUploadUrlRequest": ".media", + "GetMediaUploadUrlResponse": ".media", + "GetScoresResponse": ".score_v_2", + "GetScoresResponseData": ".score_v_2", + "GetScoresResponseDataBoolean": ".score_v_2", + "GetScoresResponseDataCategorical": ".score_v_2", + "GetScoresResponseDataNumeric": ".score_v_2", + "GetScoresResponseData_Boolean": ".score_v_2", + "GetScoresResponseData_Categorical": ".score_v_2", + "GetScoresResponseData_Numeric": ".score_v_2", + "GetScoresResponseTraceData": ".score_v_2", + "HealthResponse": ".health", + "IngestionError": ".ingestion", + "IngestionEvent": ".ingestion", + "IngestionEvent_EventCreate": ".ingestion", + "IngestionEvent_GenerationCreate": ".ingestion", + "IngestionEvent_GenerationUpdate": ".ingestion", + "IngestionEvent_ObservationCreate": ".ingestion", + "IngestionEvent_ObservationUpdate": ".ingestion", + "IngestionEvent_ScoreCreate": 
".ingestion", + "IngestionEvent_SdkLog": ".ingestion", + "IngestionEvent_SpanCreate": ".ingestion", + "IngestionEvent_SpanUpdate": ".ingestion", + "IngestionEvent_TraceCreate": ".ingestion", + "IngestionResponse": ".ingestion", + "IngestionSuccess": ".ingestion", + "IngestionUsage": ".ingestion", + "LlmAdapter": ".llm_connections", + "LlmConnection": ".llm_connections", + "MapValue": ".commons", + "MediaContentType": ".media", + "MembershipDeletionResponse": ".organizations", + "MembershipRequest": ".organizations", + "MembershipResponse": ".organizations", + "MembershipRole": ".organizations", + "MembershipsResponse": ".organizations", + "MethodNotAllowedError": ".commons", + "MetricsResponse": ".metrics", + "Model": ".commons", + "ModelPrice": ".commons", + "ModelUsageUnit": ".commons", + "NotFoundError": ".commons", + "NumericScore": ".commons", + "NumericScoreV1": ".commons", + "Observation": ".commons", + "ObservationBody": ".ingestion", + "ObservationLevel": ".commons", + "ObservationType": ".ingestion", + "Observations": ".observations", + "ObservationsView": ".commons", + "ObservationsViews": ".observations", + "OpenAiCompletionUsageSchema": ".ingestion", + "OpenAiResponseUsageSchema": ".ingestion", + "OpenAiUsage": ".ingestion", + "OptionalObservationBody": ".ingestion", + "OrganizationApiKey": ".organizations", + "OrganizationApiKeysResponse": ".organizations", + "OrganizationProject": ".organizations", + "OrganizationProjectsResponse": ".organizations", + "OtelAttribute": ".opentelemetry", + "OtelAttributeValue": ".opentelemetry", + "OtelResource": ".opentelemetry", + "OtelResourceSpan": ".opentelemetry", + "OtelScope": ".opentelemetry", + "OtelScopeSpan": ".opentelemetry", + "OtelSpan": ".opentelemetry", + "OtelTraceResponse": ".opentelemetry", + "PaginatedAnnotationQueueItems": ".annotation_queues", + "PaginatedAnnotationQueues": ".annotation_queues", + "PaginatedDatasetItems": ".dataset_items", + "PaginatedDatasetRunItems": ".dataset_run_items", + 
"PaginatedDatasetRuns": ".datasets", + "PaginatedDatasets": ".datasets", + "PaginatedLlmConnections": ".llm_connections", + "PaginatedModels": ".models", + "PaginatedSessions": ".sessions", + "PatchMediaBody": ".media", + "PlaceholderMessage": ".prompts", + "Project": ".projects", + "ProjectDeletionResponse": ".projects", + "Projects": ".projects", + "Prompt": ".prompts", + "PromptMeta": ".prompts", + "PromptMetaListResponse": ".prompts", + "PromptType": ".prompts", + "Prompt_Chat": ".prompts", + "Prompt_Text": ".prompts", + "ResourceMeta": ".scim", + "ResourceType": ".scim", + "ResourceTypesResponse": ".scim", + "SchemaExtension": ".scim", + "SchemaResource": ".scim", + "SchemasResponse": ".scim", + "ScimEmail": ".scim", + "ScimFeatureSupport": ".scim", + "ScimName": ".scim", + "ScimUser": ".scim", + "ScimUsersListResponse": ".scim", + "Score": ".commons", + "ScoreBody": ".ingestion", + "ScoreConfig": ".commons", + "ScoreConfigs": ".score_configs", + "ScoreDataType": ".commons", + "ScoreEvent": ".ingestion", + "ScoreSource": ".commons", + "ScoreV1": ".commons", + "Score_Boolean": ".commons", + "Score_Categorical": ".commons", + "Score_Numeric": ".commons", + "SdkLogBody": ".ingestion", + "SdkLogEvent": ".ingestion", + "ServiceProviderConfig": ".scim", + "ServiceUnavailableError": ".health", + "Session": ".commons", + "SessionWithTraces": ".commons", + "Sort": ".trace", + "TextPrompt": ".prompts", + "Trace": ".commons", + "TraceBody": ".ingestion", + "TraceEvent": ".ingestion", + "TraceWithDetails": ".commons", + "TraceWithFullDetails": ".commons", + "Traces": ".trace", + "UnauthorizedError": ".commons", + "UpdateAnnotationQueueItemRequest": ".annotation_queues", + "UpdateEventBody": ".ingestion", + "UpdateGenerationBody": ".ingestion", + "UpdateGenerationEvent": ".ingestion", + "UpdateObservationEvent": ".ingestion", + "UpdateScoreConfigRequest": ".score_configs", + "UpdateSpanBody": ".ingestion", + "UpdateSpanEvent": ".ingestion", + "UpsertLlmConnectionRequest": 
".llm_connections", + "Usage": ".commons", + "UsageDetails": ".ingestion", + "UserMeta": ".scim", + "annotation_queues": ".annotation_queues", + "blob_storage_integrations": ".blob_storage_integrations", + "comments": ".comments", + "commons": ".commons", + "dataset_items": ".dataset_items", + "dataset_run_items": ".dataset_run_items", + "datasets": ".datasets", + "health": ".health", + "ingestion": ".ingestion", + "llm_connections": ".llm_connections", + "media": ".media", + "metrics": ".metrics", + "models": ".models", + "observations": ".observations", + "opentelemetry": ".opentelemetry", + "organizations": ".organizations", + "projects": ".projects", + "prompt_version": ".prompt_version", + "prompts": ".prompts", + "scim": ".scim", + "score": ".score", + "score_configs": ".score_configs", + "score_v_2": ".score_v_2", + "sessions": ".sessions", + "trace": ".trace", + "utils": ".utils", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "AccessDeniedError", @@ -297,8 +570,6 @@ "CategoricalScoreV1", "ChatMessage", "ChatMessageWithPlaceholders", - "ChatMessageWithPlaceholders_Chatmessage", - "ChatMessageWithPlaceholders_Placeholder", "ChatPrompt", "Comment", "CommentObjectType", @@ -452,9 +723,6 @@ "ScoreEvent", "ScoreSource", "ScoreV1", - "ScoreV1_Boolean", - "ScoreV1_Categorical", - 
"ScoreV1_Numeric", "Score_Boolean", "Score_Categorical", "Score_Numeric", diff --git a/langfuse/api/resources/annotation_queues/__init__.py b/langfuse/api/resources/annotation_queues/__init__.py index eed891727..119661e05 100644 --- a/langfuse/api/resources/annotation_queues/__init__.py +++ b/langfuse/api/resources/annotation_queues/__init__.py @@ -1,20 +1,69 @@ # This file was auto-generated by Fern from our API Definition. -from .types import ( - AnnotationQueue, - AnnotationQueueAssignmentRequest, - AnnotationQueueItem, - AnnotationQueueObjectType, - AnnotationQueueStatus, - CreateAnnotationQueueAssignmentResponse, - CreateAnnotationQueueItemRequest, - CreateAnnotationQueueRequest, - DeleteAnnotationQueueAssignmentResponse, - DeleteAnnotationQueueItemResponse, - PaginatedAnnotationQueueItems, - PaginatedAnnotationQueues, - UpdateAnnotationQueueItemRequest, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + AnnotationQueue, + AnnotationQueueAssignmentRequest, + AnnotationQueueItem, + AnnotationQueueObjectType, + AnnotationQueueStatus, + CreateAnnotationQueueAssignmentResponse, + CreateAnnotationQueueItemRequest, + CreateAnnotationQueueRequest, + DeleteAnnotationQueueAssignmentResponse, + DeleteAnnotationQueueItemResponse, + PaginatedAnnotationQueueItems, + PaginatedAnnotationQueues, + UpdateAnnotationQueueItemRequest, + ) +_dynamic_imports: typing.Dict[str, str] = { + "AnnotationQueue": ".types", + "AnnotationQueueAssignmentRequest": ".types", + "AnnotationQueueItem": ".types", + "AnnotationQueueObjectType": ".types", + "AnnotationQueueStatus": ".types", + "CreateAnnotationQueueAssignmentResponse": ".types", + "CreateAnnotationQueueItemRequest": ".types", + "CreateAnnotationQueueRequest": ".types", + "DeleteAnnotationQueueAssignmentResponse": ".types", + "DeleteAnnotationQueueItemResponse": ".types", + "PaginatedAnnotationQueueItems": ".types", + "PaginatedAnnotationQueues": ".types", 
+ "UpdateAnnotationQueueItemRequest": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "AnnotationQueue", diff --git a/langfuse/api/resources/annotation_queues/client.py b/langfuse/api/resources/annotation_queues/client.py index 97c7c2216..811ae2e93 100644 --- a/langfuse/api/resources/annotation_queues/client.py +++ b/langfuse/api/resources/annotation_queues/client.py @@ -1,18 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawAnnotationQueuesClient, RawAnnotationQueuesClient from .types.annotation_queue import AnnotationQueue from .types.annotation_queue_assignment_request import AnnotationQueueAssignmentRequest from .types.annotation_queue_item import AnnotationQueueItem @@ -38,7 +30,18 @@ class AnnotationQueuesClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawAnnotationQueuesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawAnnotationQueuesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawAnnotationQueuesClient + """ + return self._raw_client def list_queues( self, @@ -67,7 +70,7 @@ def list_queues( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -79,39 +82,10 @@ def list_queues( ) client.annotation_queues.list_queues() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/annotation-queues", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = self._raw_client.list_queues( + page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - PaginatedAnnotationQueues, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create_queue( self, @@ -135,8 +109,8 @@ def create_queue( Examples -------- - from langfuse import CreateAnnotationQueueRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.annotation_queues import 
CreateAnnotationQueueRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -153,38 +127,10 @@ def create_queue( ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/annotation-queues", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create_queue( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AnnotationQueue, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_queue( self, queue_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -206,7 +152,7 @@ def get_queue( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -220,36 +166,10 @@ def get_queue( queue_id="queueId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}", - method="GET", - request_options=request_options, + _response 
= self._raw_client.get_queue( + queue_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AnnotationQueue, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def list_queue_items( self, @@ -286,7 +206,7 @@ def list_queue_items( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -300,39 +220,14 @@ def list_queue_items( queue_id="queueId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items", - method="GET", - params={"status": status, "page": page, "limit": limit}, + _response = self._raw_client.list_queue_items( + queue_id, + status=status, + page=page, + limit=limit, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - PaginatedAnnotationQueueItems, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise 
Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_queue_item( self, @@ -361,7 +256,7 @@ def get_queue_item( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -376,36 +271,10 @@ def get_queue_item( item_id="itemId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", - method="GET", - request_options=request_options, + _response = self._raw_client.get_queue_item( + queue_id, item_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AnnotationQueueItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: 
ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create_queue_item( self, @@ -433,8 +302,10 @@ def create_queue_item( Examples -------- - from langfuse import AnnotationQueueObjectType, CreateAnnotationQueueItemRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.annotation_queues import ( + CreateAnnotationQueueItemRequest, + ) client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -448,42 +319,14 @@ def create_queue_item( queue_id="queueId", request=CreateAnnotationQueueItemRequest( object_id="objectId", - object_type=AnnotationQueueObjectType.TRACE, + object_type="TRACE", ), ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create_queue_item( + queue_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AnnotationQueueItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if 
_response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def update_queue_item( self, @@ -515,8 +358,10 @@ def update_queue_item( Examples -------- - from langfuse import UpdateAnnotationQueueItemRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.annotation_queues import ( + UpdateAnnotationQueueItemRequest, + ) client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -532,38 +377,10 @@ def update_queue_item( request=UpdateAnnotationQueueItemRequest(), ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", - method="PATCH", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.update_queue_item( + queue_id, item_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AnnotationQueueItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, 
_response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete_queue_item( self, @@ -592,7 +409,7 @@ def delete_queue_item( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -607,38 +424,10 @@ def delete_queue_item( item_id="itemId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", - method="DELETE", - request_options=request_options, + _response = self._raw_client.delete_queue_item( + queue_id, item_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - DeleteAnnotationQueueItemResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create_queue_assignment( self, @@ -666,8 +455,10 @@ def create_queue_assignment( Examples -------- - from langfuse import AnnotationQueueAssignmentRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.annotation_queues import ( + AnnotationQueueAssignmentRequest, + ) client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -684,40 +475,10 @@ def create_queue_assignment( ), ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/assignments", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create_queue_assignment( + queue_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - CreateAnnotationQueueAssignmentResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete_queue_assignment( self, @@ -745,8 
+506,10 @@ def delete_queue_assignment( Examples -------- - from langfuse import AnnotationQueueAssignmentRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.annotation_queues import ( + AnnotationQueueAssignmentRequest, + ) client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -763,45 +526,26 @@ def delete_queue_assignment( ), ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/assignments", - method="DELETE", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.delete_queue_assignment( + queue_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - DeleteAnnotationQueueAssignmentResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncAnnotationQueuesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = 
AsyncRawAnnotationQueuesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawAnnotationQueuesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawAnnotationQueuesClient + """ + return self._raw_client async def list_queues( self, @@ -832,7 +576,7 @@ async def list_queues( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -850,39 +594,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/annotation-queues", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = await self._raw_client.list_queues( + page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - PaginatedAnnotationQueues, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async 
def create_queue( self, @@ -908,8 +623,8 @@ async def create_queue( -------- import asyncio - from langfuse import CreateAnnotationQueueRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.annotation_queues import CreateAnnotationQueueRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -932,38 +647,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/annotation-queues", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create_queue( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AnnotationQueue, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_queue( self, queue_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -987,7 +674,7 @@ async def get_queue( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from 
langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1007,36 +694,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_queue( + queue_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AnnotationQueue, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def list_queue_items( self, @@ -1075,7 +736,7 @@ async def list_queue_items( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1095,39 +756,14 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items", - method="GET", - 
params={"status": status, "page": page, "limit": limit}, + _response = await self._raw_client.list_queue_items( + queue_id, + status=status, + page=page, + limit=limit, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - PaginatedAnnotationQueueItems, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_queue_item( self, @@ -1158,7 +794,7 @@ async def get_queue_item( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1179,36 +815,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_queue_item( + queue_id, item_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(AnnotationQueueItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create_queue_item( self, @@ -1238,8 +848,10 @@ async def create_queue_item( -------- import asyncio - from langfuse import AnnotationQueueObjectType, CreateAnnotationQueueItemRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.annotation_queues import ( + CreateAnnotationQueueItemRequest, + ) client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1256,45 +868,17 @@ async def main() -> None: queue_id="queueId", request=CreateAnnotationQueueItemRequest( object_id="objectId", - object_type=AnnotationQueueObjectType.TRACE, + object_type="TRACE", ), ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create_queue_item( + queue_id, request=request, request_options=request_options ) 
- try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AnnotationQueueItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def update_queue_item( self, @@ -1328,8 +912,10 @@ async def update_queue_item( -------- import asyncio - from langfuse import UpdateAnnotationQueueItemRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.annotation_queues import ( + UpdateAnnotationQueueItemRequest, + ) client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1351,38 +937,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", - method="PATCH", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.update_queue_item( + queue_id, item_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(AnnotationQueueItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete_queue_item( self, @@ -1413,7 +971,7 @@ async def delete_queue_item( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1434,38 +992,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", - method="DELETE", - request_options=request_options, + _response = await self._raw_client.delete_queue_item( + queue_id, item_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - DeleteAnnotationQueueItemResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create_queue_assignment( self, @@ -1495,8 +1025,10 @@ async def create_queue_assignment( -------- import asyncio - from langfuse import AnnotationQueueAssignmentRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.annotation_queues import ( + AnnotationQueueAssignmentRequest, + ) client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1519,40 +1051,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/assignments", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create_queue_assignment( + queue_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - CreateAnnotationQueueAssignmentResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - 
if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete_queue_assignment( self, @@ -1582,8 +1084,10 @@ async def delete_queue_assignment( -------- import asyncio - from langfuse import AnnotationQueueAssignmentRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.annotation_queues import ( + AnnotationQueueAssignmentRequest, + ) client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1606,37 +1110,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/assignments", - method="DELETE", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.delete_queue_assignment( + queue_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - DeleteAnnotationQueueAssignmentResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/annotation_queues/raw_client.py b/langfuse/api/resources/annotation_queues/raw_client.py new file mode 100644 index 000000000..46e3fe0c7 --- /dev/null +++ b/langfuse/api/resources/annotation_queues/raw_client.py @@ -0,0 +1,2278 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.annotation_queue import AnnotationQueue +from .types.annotation_queue_assignment_request import AnnotationQueueAssignmentRequest +from .types.annotation_queue_item import AnnotationQueueItem +from .types.annotation_queue_status import AnnotationQueueStatus +from 
.types.create_annotation_queue_assignment_response import ( + CreateAnnotationQueueAssignmentResponse, +) +from .types.create_annotation_queue_item_request import CreateAnnotationQueueItemRequest +from .types.create_annotation_queue_request import CreateAnnotationQueueRequest +from .types.delete_annotation_queue_assignment_response import ( + DeleteAnnotationQueueAssignmentResponse, +) +from .types.delete_annotation_queue_item_response import ( + DeleteAnnotationQueueItemResponse, +) +from .types.paginated_annotation_queue_items import PaginatedAnnotationQueueItems +from .types.paginated_annotation_queues import PaginatedAnnotationQueues +from .types.update_annotation_queue_item_request import UpdateAnnotationQueueItemRequest + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawAnnotationQueuesClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list_queues( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PaginatedAnnotationQueues]: + """ + Get all annotation queues + + Parameters + ---------- + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[PaginatedAnnotationQueues] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/annotation-queues", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedAnnotationQueues, + parse_obj_as( + type_=PaginatedAnnotationQueues, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def create_queue( + self, + *, + request: 
CreateAnnotationQueueRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AnnotationQueue]: + """ + Create an annotation queue + + Parameters + ---------- + request : CreateAnnotationQueueRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AnnotationQueue] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/annotation-queues", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=CreateAnnotationQueueRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AnnotationQueue, + parse_obj_as( + type_=AnnotationQueue, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_queue( + self, queue_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AnnotationQueue]: + """ + Get an annotation queue by ID + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AnnotationQueue] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AnnotationQueue, + parse_obj_as( + type_=AnnotationQueue, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # 
type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def list_queue_items( + self, + queue_id: str, + *, + status: typing.Optional[AnnotationQueueStatus] = None, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PaginatedAnnotationQueueItems]: + """ + Get items for a specific annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + status : typing.Optional[AnnotationQueueStatus] + Filter by status + + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[PaginatedAnnotationQueueItems] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items", + method="GET", + params={ + "status": status, + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedAnnotationQueueItems, + parse_obj_as( + type_=PaginatedAnnotationQueueItems, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + 
def get_queue_item( + self, + queue_id: str, + item_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AnnotationQueueItem]: + """ + Get a specific item from an annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + item_id : str + The unique identifier of the annotation queue item + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AnnotationQueueItem] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AnnotationQueueItem, + parse_obj_as( + type_=AnnotationQueueItem, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + 
typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def create_queue_item( + self, + queue_id: str, + *, + request: CreateAnnotationQueueItemRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AnnotationQueueItem]: + """ + Add an item to an annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + request : CreateAnnotationQueueItemRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AnnotationQueueItem] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=CreateAnnotationQueueItemRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AnnotationQueueItem, + parse_obj_as( + type_=AnnotationQueueItem, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise 
AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def update_queue_item( + self, + queue_id: str, + item_id: str, + *, + request: UpdateAnnotationQueueItemRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AnnotationQueueItem]: + """ + Update an annotation queue item + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + item_id : str + The unique identifier of the annotation queue item + + request : UpdateAnnotationQueueItemRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[AnnotationQueueItem] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", + method="PATCH", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=UpdateAnnotationQueueItemRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AnnotationQueueItem, + parse_obj_as( + type_=AnnotationQueueItem, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + 
status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete_queue_item( + self, + queue_id: str, + item_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeleteAnnotationQueueItemResponse]: + """ + Remove an item from an annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + item_id : str + The unique identifier of the annotation queue item + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeleteAnnotationQueueItemResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteAnnotationQueueItemResponse, + parse_obj_as( + type_=DeleteAnnotationQueueItemResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def create_queue_assignment( + self, + queue_id: str, + *, + request: AnnotationQueueAssignmentRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[CreateAnnotationQueueAssignmentResponse]: + """ + Create an assignment for a user to an annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + request : AnnotationQueueAssignmentRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[CreateAnnotationQueueAssignmentResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/assignments", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=AnnotationQueueAssignmentRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + CreateAnnotationQueueAssignmentResponse, + parse_obj_as( + type_=CreateAnnotationQueueAssignmentResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + 
raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete_queue_assignment( + self, + queue_id: str, + *, + request: AnnotationQueueAssignmentRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeleteAnnotationQueueAssignmentResponse]: + """ + Delete an assignment for a user to an annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + request : AnnotationQueueAssignmentRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeleteAnnotationQueueAssignmentResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/assignments", + method="DELETE", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=AnnotationQueueAssignmentRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteAnnotationQueueAssignmentResponse, + parse_obj_as( + type_=DeleteAnnotationQueueAssignmentResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), 
+ ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawAnnotationQueuesClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list_queues( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PaginatedAnnotationQueues]: + """ + Get all annotation queues + + Parameters + ---------- + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[PaginatedAnnotationQueues] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/annotation-queues", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedAnnotationQueues, + parse_obj_as( + type_=PaginatedAnnotationQueues, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def create_queue( + self, + *, + request: 
CreateAnnotationQueueRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[AnnotationQueue]: + """ + Create an annotation queue + + Parameters + ---------- + request : CreateAnnotationQueueRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AnnotationQueue] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/annotation-queues", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=CreateAnnotationQueueRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AnnotationQueue, + parse_obj_as( + type_=AnnotationQueue, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore 
+ object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_queue( + self, queue_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[AnnotationQueue]: + """ + Get an annotation queue by ID + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AnnotationQueue] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AnnotationQueue, + parse_obj_as( + type_=AnnotationQueue, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + 
parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def list_queue_items( + self, + queue_id: str, + *, + status: typing.Optional[AnnotationQueueStatus] = None, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PaginatedAnnotationQueueItems]: + """ + Get items for a specific annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + status : typing.Optional[AnnotationQueueStatus] + Filter by status + + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[PaginatedAnnotationQueueItems] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items", + method="GET", + params={ + "status": status, + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedAnnotationQueueItems, + parse_obj_as( + type_=PaginatedAnnotationQueueItems, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + 
body=_response_json, + ) + + async def get_queue_item( + self, + queue_id: str, + item_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[AnnotationQueueItem]: + """ + Get a specific item from an annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + item_id : str + The unique identifier of the annotation queue item + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AnnotationQueueItem] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AnnotationQueueItem, + parse_obj_as( + type_=AnnotationQueueItem, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + 
headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def create_queue_item( + self, + queue_id: str, + *, + request: CreateAnnotationQueueItemRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[AnnotationQueueItem]: + """ + Add an item to an annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + request : CreateAnnotationQueueItemRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AnnotationQueueItem] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=CreateAnnotationQueueItemRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AnnotationQueueItem, + parse_obj_as( + type_=AnnotationQueueItem, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def update_queue_item( + self, + queue_id: str, + item_id: str, + *, + request: UpdateAnnotationQueueItemRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[AnnotationQueueItem]: + """ + Update an annotation queue item + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + item_id : str + The unique identifier of the annotation queue item + + request : UpdateAnnotationQueueItemRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[AnnotationQueueItem] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", + method="PATCH", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=UpdateAnnotationQueueItemRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AnnotationQueueItem, + parse_obj_as( + type_=AnnotationQueueItem, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + 
status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def delete_queue_item( + self, + queue_id: str, + item_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeleteAnnotationQueueItemResponse]: + """ + Remove an item from an annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + item_id : str + The unique identifier of the annotation queue item + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeleteAnnotationQueueItemResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/items/{jsonable_encoder(item_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteAnnotationQueueItemResponse, + parse_obj_as( + type_=DeleteAnnotationQueueItemResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + 
type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def create_queue_assignment( + self, + queue_id: str, + *, + request: AnnotationQueueAssignmentRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[CreateAnnotationQueueAssignmentResponse]: + """ + Create an assignment for a user to an annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + request : AnnotationQueueAssignmentRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[CreateAnnotationQueueAssignmentResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/assignments", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=AnnotationQueueAssignmentRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + CreateAnnotationQueueAssignmentResponse, + parse_obj_as( + type_=CreateAnnotationQueueAssignmentResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + 
body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def delete_queue_assignment( + self, + queue_id: str, + *, + request: AnnotationQueueAssignmentRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeleteAnnotationQueueAssignmentResponse]: + """ + Delete an assignment for a user to an annotation queue + + Parameters + ---------- + queue_id : str + The unique identifier of the annotation queue + + request : AnnotationQueueAssignmentRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeleteAnnotationQueueAssignmentResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/annotation-queues/{jsonable_encoder(queue_id)}/assignments", + method="DELETE", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=AnnotationQueueAssignmentRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteAnnotationQueueAssignmentResponse, + parse_obj_as( + type_=DeleteAnnotationQueueAssignmentResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + 
type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/annotation_queues/types/__init__.py b/langfuse/api/resources/annotation_queues/types/__init__.py index 9f9ce37dd..0d34bb763 100644 --- a/langfuse/api/resources/annotation_queues/types/__init__.py +++ b/langfuse/api/resources/annotation_queues/types/__init__.py @@ -1,22 +1,71 @@ # This file was auto-generated by Fern from our API Definition. 
-from .annotation_queue import AnnotationQueue -from .annotation_queue_assignment_request import AnnotationQueueAssignmentRequest -from .annotation_queue_item import AnnotationQueueItem -from .annotation_queue_object_type import AnnotationQueueObjectType -from .annotation_queue_status import AnnotationQueueStatus -from .create_annotation_queue_assignment_response import ( - CreateAnnotationQueueAssignmentResponse, -) -from .create_annotation_queue_item_request import CreateAnnotationQueueItemRequest -from .create_annotation_queue_request import CreateAnnotationQueueRequest -from .delete_annotation_queue_assignment_response import ( - DeleteAnnotationQueueAssignmentResponse, -) -from .delete_annotation_queue_item_response import DeleteAnnotationQueueItemResponse -from .paginated_annotation_queue_items import PaginatedAnnotationQueueItems -from .paginated_annotation_queues import PaginatedAnnotationQueues -from .update_annotation_queue_item_request import UpdateAnnotationQueueItemRequest +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .annotation_queue import AnnotationQueue + from .annotation_queue_assignment_request import AnnotationQueueAssignmentRequest + from .annotation_queue_item import AnnotationQueueItem + from .annotation_queue_object_type import AnnotationQueueObjectType + from .annotation_queue_status import AnnotationQueueStatus + from .create_annotation_queue_assignment_response import ( + CreateAnnotationQueueAssignmentResponse, + ) + from .create_annotation_queue_item_request import CreateAnnotationQueueItemRequest + from .create_annotation_queue_request import CreateAnnotationQueueRequest + from .delete_annotation_queue_assignment_response import ( + DeleteAnnotationQueueAssignmentResponse, + ) + from .delete_annotation_queue_item_response import DeleteAnnotationQueueItemResponse + from .paginated_annotation_queue_items import PaginatedAnnotationQueueItems + from 
.paginated_annotation_queues import PaginatedAnnotationQueues + from .update_annotation_queue_item_request import UpdateAnnotationQueueItemRequest +_dynamic_imports: typing.Dict[str, str] = { + "AnnotationQueue": ".annotation_queue", + "AnnotationQueueAssignmentRequest": ".annotation_queue_assignment_request", + "AnnotationQueueItem": ".annotation_queue_item", + "AnnotationQueueObjectType": ".annotation_queue_object_type", + "AnnotationQueueStatus": ".annotation_queue_status", + "CreateAnnotationQueueAssignmentResponse": ".create_annotation_queue_assignment_response", + "CreateAnnotationQueueItemRequest": ".create_annotation_queue_item_request", + "CreateAnnotationQueueRequest": ".create_annotation_queue_request", + "DeleteAnnotationQueueAssignmentResponse": ".delete_annotation_queue_assignment_response", + "DeleteAnnotationQueueItemResponse": ".delete_annotation_queue_item_response", + "PaginatedAnnotationQueueItems": ".paginated_annotation_queue_items", + "PaginatedAnnotationQueues": ".paginated_annotation_queues", + "UpdateAnnotationQueueItemRequest": ".update_annotation_queue_item_request", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "AnnotationQueue", diff --git a/langfuse/api/resources/annotation_queues/types/annotation_queue.py 
b/langfuse/api/resources/annotation_queues/types/annotation_queue.py index c4cc23282..a9579c52f 100644 --- a/langfuse/api/resources/annotation_queues/types/annotation_queue.py +++ b/langfuse/api/resources/annotation_queues/types/annotation_queue.py @@ -3,47 +3,33 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class AnnotationQueue(pydantic_v1.BaseModel): +class AnnotationQueue(UniversalBaseModel): id: str name: str description: typing.Optional[str] = None - score_config_ids: typing.List[str] = pydantic_v1.Field(alias="scoreConfigIds") - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + score_config_ids: typing_extensions.Annotated[ + typing.List[str], FieldMetadata(alias="scoreConfigIds") + ] + created_at: typing_extensions.Annotated[ + dt.datetime, 
FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/annotation_queues/types/annotation_queue_assignment_request.py b/langfuse/api/resources/annotation_queues/types/annotation_queue_assignment_request.py index aa3980438..f1ee35317 100644 --- a/langfuse/api/resources/annotation_queues/types/annotation_queue_assignment_request.py +++ b/langfuse/api/resources/annotation_queues/types/annotation_queue_assignment_request.py @@ -1,44 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class AnnotationQueueAssignmentRequest(pydantic_v1.BaseModel): - user_id: str = pydantic_v1.Field(alias="userId") +class AnnotationQueueAssignmentRequest(UniversalBaseModel): + user_id: typing_extensions.Annotated[str, FieldMetadata(alias="userId")] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - 
**kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/annotation_queues/types/annotation_queue_item.py b/langfuse/api/resources/annotation_queues/types/annotation_queue_item.py index e88829a1f..bdeb062d9 100644 --- a/langfuse/api/resources/annotation_queues/types/annotation_queue_item.py +++ b/langfuse/api/resources/annotation_queues/types/annotation_queue_item.py @@ -3,53 +3,39 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .annotation_queue_object_type import AnnotationQueueObjectType from .annotation_queue_status import AnnotationQueueStatus -class AnnotationQueueItem(pydantic_v1.BaseModel): +class AnnotationQueueItem(UniversalBaseModel): id: str - queue_id: str = pydantic_v1.Field(alias="queueId") - object_id: str = pydantic_v1.Field(alias="objectId") - object_type: AnnotationQueueObjectType = pydantic_v1.Field(alias="objectType") + queue_id: typing_extensions.Annotated[str, FieldMetadata(alias="queueId")] + object_id: typing_extensions.Annotated[str, FieldMetadata(alias="objectId")] + object_type: typing_extensions.Annotated[ + AnnotationQueueObjectType, FieldMetadata(alias="objectType") + ] status: AnnotationQueueStatus - 
completed_at: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="completedAt", default=None - ) - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + completed_at: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="completedAt") + ] = None + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/annotation_queues/types/annotation_queue_object_type.py b/langfuse/api/resources/annotation_queues/types/annotation_queue_object_type.py index 6e63a7015..8f95334b0 100644 --- a/langfuse/api/resources/annotation_queues/types/annotation_queue_object_type.py +++ 
b/langfuse/api/resources/annotation_queues/types/annotation_queue_object_type.py @@ -1,25 +1,7 @@ # This file was auto-generated by Fern from our API Definition. -import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class AnnotationQueueObjectType(str, enum.Enum): - TRACE = "TRACE" - OBSERVATION = "OBSERVATION" - SESSION = "SESSION" - - def visit( - self, - trace: typing.Callable[[], T_Result], - observation: typing.Callable[[], T_Result], - session: typing.Callable[[], T_Result], - ) -> T_Result: - if self is AnnotationQueueObjectType.TRACE: - return trace() - if self is AnnotationQueueObjectType.OBSERVATION: - return observation() - if self is AnnotationQueueObjectType.SESSION: - return session() +AnnotationQueueObjectType = typing.Union[ + typing.Literal["TRACE", "OBSERVATION", "SESSION"], typing.Any +] diff --git a/langfuse/api/resources/annotation_queues/types/annotation_queue_status.py b/langfuse/api/resources/annotation_queues/types/annotation_queue_status.py index cf075f38a..0a732adbc 100644 --- a/langfuse/api/resources/annotation_queues/types/annotation_queue_status.py +++ b/langfuse/api/resources/annotation_queues/types/annotation_queue_status.py @@ -1,21 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class AnnotationQueueStatus(str, enum.Enum): - PENDING = "PENDING" - COMPLETED = "COMPLETED" - - def visit( - self, - pending: typing.Callable[[], T_Result], - completed: typing.Callable[[], T_Result], - ) -> T_Result: - if self is AnnotationQueueStatus.PENDING: - return pending() - if self is AnnotationQueueStatus.COMPLETED: - return completed() +AnnotationQueueStatus = typing.Union[typing.Literal["PENDING", "COMPLETED"], typing.Any] diff --git a/langfuse/api/resources/annotation_queues/types/create_annotation_queue_assignment_response.py b/langfuse/api/resources/annotation_queues/types/create_annotation_queue_assignment_response.py index ae6a46862..ce6fc125c 100644 --- a/langfuse/api/resources/annotation_queues/types/create_annotation_queue_assignment_response.py +++ b/langfuse/api/resources/annotation_queues/types/create_annotation_queue_assignment_response.py @@ -1,46 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class CreateAnnotationQueueAssignmentResponse(pydantic_v1.BaseModel): - user_id: str = pydantic_v1.Field(alias="userId") - queue_id: str = pydantic_v1.Field(alias="queueId") - project_id: str = pydantic_v1.Field(alias="projectId") +class CreateAnnotationQueueAssignmentResponse(UniversalBaseModel): + user_id: typing_extensions.Annotated[str, FieldMetadata(alias="userId")] + queue_id: typing_extensions.Annotated[str, FieldMetadata(alias="queueId")] + project_id: typing_extensions.Annotated[str, FieldMetadata(alias="projectId")] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git 
a/langfuse/api/resources/annotation_queues/types/create_annotation_queue_item_request.py b/langfuse/api/resources/annotation_queues/types/create_annotation_queue_item_request.py index cbf257f29..c7bf0ec87 100644 --- a/langfuse/api/resources/annotation_queues/types/create_annotation_queue_item_request.py +++ b/langfuse/api/resources/annotation_queues/types/create_annotation_queue_item_request.py @@ -1,51 +1,32 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .annotation_queue_object_type import AnnotationQueueObjectType from .annotation_queue_status import AnnotationQueueStatus -class CreateAnnotationQueueItemRequest(pydantic_v1.BaseModel): - object_id: str = pydantic_v1.Field(alias="objectId") - object_type: AnnotationQueueObjectType = pydantic_v1.Field(alias="objectType") - status: typing.Optional[AnnotationQueueStatus] = pydantic_v1.Field(default=None) +class CreateAnnotationQueueItemRequest(UniversalBaseModel): + object_id: typing_extensions.Annotated[str, FieldMetadata(alias="objectId")] + object_type: typing_extensions.Annotated[ + AnnotationQueueObjectType, FieldMetadata(alias="objectType") + ] + status: typing.Optional[AnnotationQueueStatus] = pydantic.Field(default=None) """ Defaults to PENDING for new queue items """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: 
typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/annotation_queues/types/create_annotation_queue_request.py b/langfuse/api/resources/annotation_queues/types/create_annotation_queue_request.py index 7f793cea2..034b1e1b5 100644 --- a/langfuse/api/resources/annotation_queues/types/create_annotation_queue_request.py +++ b/langfuse/api/resources/annotation_queues/types/create_annotation_queue_request.py @@ -1,46 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class CreateAnnotationQueueRequest(pydantic_v1.BaseModel): +class CreateAnnotationQueueRequest(UniversalBaseModel): name: str description: typing.Optional[str] = None - score_config_ids: typing.List[str] = pydantic_v1.Field(alias="scoreConfigIds") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + score_config_ids: typing_extensions.Annotated[ + typing.List[str], FieldMetadata(alias="scoreConfigIds") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/annotation_queues/types/delete_annotation_queue_assignment_response.py 
b/langfuse/api/resources/annotation_queues/types/delete_annotation_queue_assignment_response.py index e348d546c..3d42855c7 100644 --- a/langfuse/api/resources/annotation_queues/types/delete_annotation_queue_assignment_response.py +++ b/langfuse/api/resources/annotation_queues/types/delete_annotation_queue_assignment_response.py @@ -1,42 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DeleteAnnotationQueueAssignmentResponse(pydantic_v1.BaseModel): +class DeleteAnnotationQueueAssignmentResponse(UniversalBaseModel): success: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/annotation_queues/types/delete_annotation_queue_item_response.py 
b/langfuse/api/resources/annotation_queues/types/delete_annotation_queue_item_response.py index a412c85b7..82d7cd81e 100644 --- a/langfuse/api/resources/annotation_queues/types/delete_annotation_queue_item_response.py +++ b/langfuse/api/resources/annotation_queues/types/delete_annotation_queue_item_response.py @@ -1,43 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DeleteAnnotationQueueItemResponse(pydantic_v1.BaseModel): +class DeleteAnnotationQueueItemResponse(UniversalBaseModel): success: bool message: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/annotation_queues/types/paginated_annotation_queue_items.py 
b/langfuse/api/resources/annotation_queues/types/paginated_annotation_queue_items.py index 587188d89..ee5191917 100644 --- a/langfuse/api/resources/annotation_queues/types/paginated_annotation_queue_items.py +++ b/langfuse/api/resources/annotation_queues/types/paginated_annotation_queue_items.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...utils.resources.pagination.types.meta_response import MetaResponse from .annotation_queue_item import AnnotationQueueItem -class PaginatedAnnotationQueueItems(pydantic_v1.BaseModel): +class PaginatedAnnotationQueueItems(UniversalBaseModel): data: typing.List[AnnotationQueueItem] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git 
a/langfuse/api/resources/annotation_queues/types/paginated_annotation_queues.py b/langfuse/api/resources/annotation_queues/types/paginated_annotation_queues.py index aba338414..851720a46 100644 --- a/langfuse/api/resources/annotation_queues/types/paginated_annotation_queues.py +++ b/langfuse/api/resources/annotation_queues/types/paginated_annotation_queues.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...utils.resources.pagination.types.meta_response import MetaResponse from .annotation_queue import AnnotationQueue -class PaginatedAnnotationQueues(pydantic_v1.BaseModel): +class PaginatedAnnotationQueues(UniversalBaseModel): data: typing.List[AnnotationQueue] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + 
extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/annotation_queues/types/update_annotation_queue_item_request.py b/langfuse/api/resources/annotation_queues/types/update_annotation_queue_item_request.py index 3b1c130fe..29d2119b4 100644 --- a/langfuse/api/resources/annotation_queues/types/update_annotation_queue_item_request.py +++ b/langfuse/api/resources/annotation_queues/types/update_annotation_queue_item_request.py @@ -1,43 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotation_queue_status import AnnotationQueueStatus -class UpdateAnnotationQueueItemRequest(pydantic_v1.BaseModel): +class UpdateAnnotationQueueItemRequest(UniversalBaseModel): status: typing.Optional[AnnotationQueueStatus] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = 
True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/blob_storage_integrations/__init__.py b/langfuse/api/resources/blob_storage_integrations/__init__.py index a635fba57..abd0d9e84 100644 --- a/langfuse/api/resources/blob_storage_integrations/__init__.py +++ b/langfuse/api/resources/blob_storage_integrations/__init__.py @@ -1,15 +1,59 @@ # This file was auto-generated by Fern from our API Definition. -from .types import ( - BlobStorageExportFrequency, - BlobStorageExportMode, - BlobStorageIntegrationDeletionResponse, - BlobStorageIntegrationFileType, - BlobStorageIntegrationResponse, - BlobStorageIntegrationType, - BlobStorageIntegrationsResponse, - CreateBlobStorageIntegrationRequest, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + BlobStorageExportFrequency, + BlobStorageExportMode, + BlobStorageIntegrationDeletionResponse, + BlobStorageIntegrationFileType, + BlobStorageIntegrationResponse, + BlobStorageIntegrationType, + BlobStorageIntegrationsResponse, + CreateBlobStorageIntegrationRequest, + ) +_dynamic_imports: typing.Dict[str, str] = { + "BlobStorageExportFrequency": ".types", + "BlobStorageExportMode": ".types", + "BlobStorageIntegrationDeletionResponse": ".types", + "BlobStorageIntegrationFileType": ".types", + "BlobStorageIntegrationResponse": ".types", + "BlobStorageIntegrationType": ".types", + "BlobStorageIntegrationsResponse": ".types", + "CreateBlobStorageIntegrationRequest": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to 
import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "BlobStorageExportFrequency", diff --git a/langfuse/api/resources/blob_storage_integrations/client.py b/langfuse/api/resources/blob_storage_integrations/client.py index 73aec4fa4..e4fd79442 100644 --- a/langfuse/api/resources/blob_storage_integrations/client.py +++ b/langfuse/api/resources/blob_storage_integrations/client.py @@ -1,18 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import ( + AsyncRawBlobStorageIntegrationsClient, + RawBlobStorageIntegrationsClient, +) from .types.blob_storage_integration_deletion_response import ( BlobStorageIntegrationDeletionResponse, ) @@ -28,7 +23,20 @@ class BlobStorageIntegrationsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawBlobStorageIntegrationsClient( + client_wrapper=client_wrapper + ) + + @property + def with_raw_response(self) -> RawBlobStorageIntegrationsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawBlobStorageIntegrationsClient + """ + return self._raw_client def get_blob_storage_integrations( self, *, request_options: typing.Optional[RequestOptions] = None @@ -47,7 +55,7 @@ def get_blob_storage_integrations( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -59,38 +67,10 @@ def get_blob_storage_integrations( ) client.blob_storage_integrations.get_blob_storage_integrations() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/integrations/blob-storage", - method="GET", - request_options=request_options, + _response = self._raw_client.get_blob_storage_integrations( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - BlobStorageIntegrationsResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def upsert_blob_storage_integration( self, @@ -114,14 +94,10 @@ def upsert_blob_storage_integration( Examples -------- - from langfuse import ( - 
BlobStorageExportFrequency, - BlobStorageExportMode, - BlobStorageIntegrationFileType, - BlobStorageIntegrationType, + from langfuse import FernLangfuse + from langfuse.resources.blob_storage_integrations import ( CreateBlobStorageIntegrationRequest, ) - from langfuse.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -134,51 +110,21 @@ def upsert_blob_storage_integration( client.blob_storage_integrations.upsert_blob_storage_integration( request=CreateBlobStorageIntegrationRequest( project_id="projectId", - type=BlobStorageIntegrationType.S_3, + type="S3", bucket_name="bucketName", region="region", - export_frequency=BlobStorageExportFrequency.HOURLY, + export_frequency="hourly", enabled=True, force_path_style=True, - file_type=BlobStorageIntegrationFileType.JSON, - export_mode=BlobStorageExportMode.FULL_HISTORY, + file_type="JSON", + export_mode="FULL_HISTORY", ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/integrations/blob-storage", - method="PUT", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.upsert_blob_storage_integration( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - BlobStorageIntegrationResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete_blob_storage_integration( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -199,7 +145,7 @@ def delete_blob_storage_integration( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -213,43 +159,28 @@ def delete_blob_storage_integration( id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/integrations/blob-storage/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, + _response = self._raw_client.delete_blob_storage_integration( + id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - BlobStorageIntegrationDeletionResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncBlobStorageIntegrationsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawBlobStorageIntegrationsClient( + client_wrapper=client_wrapper + ) + + @property + def with_raw_response(self) -> AsyncRawBlobStorageIntegrationsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawBlobStorageIntegrationsClient + """ + return self._raw_client async def get_blob_storage_integrations( self, *, request_options: typing.Optional[RequestOptions] = None @@ -270,7 +201,7 @@ async def get_blob_storage_integrations( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -288,38 +219,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/integrations/blob-storage", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_blob_storage_integrations( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - BlobStorageIntegrationsResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 
404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def upsert_blob_storage_integration( self, @@ -345,14 +248,10 @@ async def upsert_blob_storage_integration( -------- import asyncio - from langfuse import ( - BlobStorageExportFrequency, - BlobStorageExportMode, - BlobStorageIntegrationFileType, - BlobStorageIntegrationType, + from langfuse import AsyncFernLangfuse + from langfuse.resources.blob_storage_integrations import ( CreateBlobStorageIntegrationRequest, ) - from langfuse.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -368,54 +267,24 @@ async def main() -> None: await client.blob_storage_integrations.upsert_blob_storage_integration( request=CreateBlobStorageIntegrationRequest( project_id="projectId", - type=BlobStorageIntegrationType.S_3, + type="S3", bucket_name="bucketName", region="region", - export_frequency=BlobStorageExportFrequency.HOURLY, + export_frequency="hourly", enabled=True, force_path_style=True, - file_type=BlobStorageIntegrationFileType.JSON, - export_mode=BlobStorageExportMode.FULL_HISTORY, + file_type="JSON", + export_mode="FULL_HISTORY", ), ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/integrations/blob-storage", - method="PUT", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.upsert_blob_storage_integration( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - BlobStorageIntegrationResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise 
Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete_blob_storage_integration( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -438,7 +307,7 @@ async def delete_blob_storage_integration( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -458,35 +327,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/integrations/blob-storage/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, + _response = await self._raw_client.delete_blob_storage_integration( + id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - BlobStorageIntegrationDeletionResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # 
type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/blob_storage_integrations/raw_client.py b/langfuse/api/resources/blob_storage_integrations/raw_client.py new file mode 100644 index 000000000..85c39319c --- /dev/null +++ b/langfuse/api/resources/blob_storage_integrations/raw_client.py @@ -0,0 +1,652 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.blob_storage_integration_deletion_response import ( + BlobStorageIntegrationDeletionResponse, +) +from .types.blob_storage_integration_response import BlobStorageIntegrationResponse +from .types.blob_storage_integrations_response import BlobStorageIntegrationsResponse +from .types.create_blob_storage_integration_request import ( + CreateBlobStorageIntegrationRequest, +) + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawBlobStorageIntegrationsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get_blob_storage_integrations( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[BlobStorageIntegrationsResponse]: + """ + Get all blob storage integrations for the organization (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[BlobStorageIntegrationsResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/integrations/blob-storage", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BlobStorageIntegrationsResponse, + parse_obj_as( + type_=BlobStorageIntegrationsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def upsert_blob_storage_integration( + self, + *, + request: 
CreateBlobStorageIntegrationRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[BlobStorageIntegrationResponse]: + """ + Create or update a blob storage integration for a specific project (requires organization-scoped API key). The configuration is validated by performing a test upload to the bucket. + + Parameters + ---------- + request : CreateBlobStorageIntegrationRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BlobStorageIntegrationResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/integrations/blob-storage", + method="PUT", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=CreateBlobStorageIntegrationRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BlobStorageIntegrationResponse, + parse_obj_as( + type_=BlobStorageIntegrationResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore 
+ object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete_blob_storage_integration( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[BlobStorageIntegrationDeletionResponse]: + """ + Delete a blob storage integration by ID (requires organization-scoped API key) + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BlobStorageIntegrationDeletionResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/integrations/blob-storage/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BlobStorageIntegrationDeletionResponse, + parse_obj_as( + type_=BlobStorageIntegrationDeletionResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + 
headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawBlobStorageIntegrationsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get_blob_storage_integrations( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[BlobStorageIntegrationsResponse]: + """ + Get all blob storage integrations for the organization (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[BlobStorageIntegrationsResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/integrations/blob-storage", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BlobStorageIntegrationsResponse, + parse_obj_as( + type_=BlobStorageIntegrationsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def upsert_blob_storage_integration( + self, + *, + request: 
CreateBlobStorageIntegrationRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[BlobStorageIntegrationResponse]: + """ + Create or update a blob storage integration for a specific project (requires organization-scoped API key). The configuration is validated by performing a test upload to the bucket. + + Parameters + ---------- + request : CreateBlobStorageIntegrationRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BlobStorageIntegrationResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/integrations/blob-storage", + method="PUT", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=CreateBlobStorageIntegrationRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BlobStorageIntegrationResponse, + parse_obj_as( + type_=BlobStorageIntegrationResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + 
type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def delete_blob_storage_integration( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[BlobStorageIntegrationDeletionResponse]: + """ + Delete a blob storage integration by ID (requires organization-scoped API key) + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BlobStorageIntegrationDeletionResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/integrations/blob-storage/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BlobStorageIntegrationDeletionResponse, + parse_obj_as( + type_=BlobStorageIntegrationDeletionResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if 
_response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/blob_storage_integrations/types/__init__.py b/langfuse/api/resources/blob_storage_integrations/types/__init__.py index 621196c11..cc19f1a6d 100644 --- a/langfuse/api/resources/blob_storage_integrations/types/__init__.py +++ b/langfuse/api/resources/blob_storage_integrations/types/__init__.py @@ -1,15 +1,61 @@ # This file was auto-generated by Fern from our API Definition. 
-from .blob_storage_export_frequency import BlobStorageExportFrequency -from .blob_storage_export_mode import BlobStorageExportMode -from .blob_storage_integration_deletion_response import ( - BlobStorageIntegrationDeletionResponse, -) -from .blob_storage_integration_file_type import BlobStorageIntegrationFileType -from .blob_storage_integration_response import BlobStorageIntegrationResponse -from .blob_storage_integration_type import BlobStorageIntegrationType -from .blob_storage_integrations_response import BlobStorageIntegrationsResponse -from .create_blob_storage_integration_request import CreateBlobStorageIntegrationRequest +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .blob_storage_export_frequency import BlobStorageExportFrequency + from .blob_storage_export_mode import BlobStorageExportMode + from .blob_storage_integration_deletion_response import ( + BlobStorageIntegrationDeletionResponse, + ) + from .blob_storage_integration_file_type import BlobStorageIntegrationFileType + from .blob_storage_integration_response import BlobStorageIntegrationResponse + from .blob_storage_integration_type import BlobStorageIntegrationType + from .blob_storage_integrations_response import BlobStorageIntegrationsResponse + from .create_blob_storage_integration_request import ( + CreateBlobStorageIntegrationRequest, + ) +_dynamic_imports: typing.Dict[str, str] = { + "BlobStorageExportFrequency": ".blob_storage_export_frequency", + "BlobStorageExportMode": ".blob_storage_export_mode", + "BlobStorageIntegrationDeletionResponse": ".blob_storage_integration_deletion_response", + "BlobStorageIntegrationFileType": ".blob_storage_integration_file_type", + "BlobStorageIntegrationResponse": ".blob_storage_integration_response", + "BlobStorageIntegrationType": ".blob_storage_integration_type", + "BlobStorageIntegrationsResponse": ".blob_storage_integrations_response", + "CreateBlobStorageIntegrationRequest": 
".create_blob_storage_integration_request", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "BlobStorageExportFrequency", diff --git a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_export_frequency.py b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_export_frequency.py index 936e0c18f..a7307c30e 100644 --- a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_export_frequency.py +++ b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_export_frequency.py @@ -1,25 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class BlobStorageExportFrequency(str, enum.Enum): - HOURLY = "hourly" - DAILY = "daily" - WEEKLY = "weekly" - - def visit( - self, - hourly: typing.Callable[[], T_Result], - daily: typing.Callable[[], T_Result], - weekly: typing.Callable[[], T_Result], - ) -> T_Result: - if self is BlobStorageExportFrequency.HOURLY: - return hourly() - if self is BlobStorageExportFrequency.DAILY: - return daily() - if self is BlobStorageExportFrequency.WEEKLY: - return weekly() +BlobStorageExportFrequency = typing.Union[ + typing.Literal["hourly", "daily", "weekly"], typing.Any +] diff --git a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_export_mode.py b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_export_mode.py index 1eafab79d..f1b1d95a3 100644 --- a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_export_mode.py +++ b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_export_mode.py @@ -1,25 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class BlobStorageExportMode(str, enum.Enum): - FULL_HISTORY = "FULL_HISTORY" - FROM_TODAY = "FROM_TODAY" - FROM_CUSTOM_DATE = "FROM_CUSTOM_DATE" - - def visit( - self, - full_history: typing.Callable[[], T_Result], - from_today: typing.Callable[[], T_Result], - from_custom_date: typing.Callable[[], T_Result], - ) -> T_Result: - if self is BlobStorageExportMode.FULL_HISTORY: - return full_history() - if self is BlobStorageExportMode.FROM_TODAY: - return from_today() - if self is BlobStorageExportMode.FROM_CUSTOM_DATE: - return from_custom_date() +BlobStorageExportMode = typing.Union[ + typing.Literal["FULL_HISTORY", "FROM_TODAY", "FROM_CUSTOM_DATE"], typing.Any +] diff --git a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_deletion_response.py b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_deletion_response.py index 4305cff2f..38f457122 100644 --- a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_deletion_response.py +++ b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_deletion_response.py @@ -1,42 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class BlobStorageIntegrationDeletionResponse(pydantic_v1.BaseModel): +class BlobStorageIntegrationDeletionResponse(UniversalBaseModel): message: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_file_type.py b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_file_type.py index a63631c6f..09e8762ab 100644 --- a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_file_type.py +++ b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_file_type.py @@ -1,25 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class BlobStorageIntegrationFileType(str, enum.Enum): - JSON = "JSON" - CSV = "CSV" - JSONL = "JSONL" - - def visit( - self, - json: typing.Callable[[], T_Result], - csv: typing.Callable[[], T_Result], - jsonl: typing.Callable[[], T_Result], - ) -> T_Result: - if self is BlobStorageIntegrationFileType.JSON: - return json() - if self is BlobStorageIntegrationFileType.CSV: - return csv() - if self is BlobStorageIntegrationFileType.JSONL: - return jsonl() +BlobStorageIntegrationFileType = typing.Union[ + typing.Literal["JSON", "CSV", "JSONL"], typing.Any +] diff --git a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_response.py b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_response.py index e308e8113..543532ecd 100644 --- a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_response.py +++ b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_response.py @@ -3,73 +3,63 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .blob_storage_export_frequency import BlobStorageExportFrequency from .blob_storage_export_mode import BlobStorageExportMode from .blob_storage_integration_file_type import BlobStorageIntegrationFileType from .blob_storage_integration_type import BlobStorageIntegrationType -class BlobStorageIntegrationResponse(pydantic_v1.BaseModel): +class BlobStorageIntegrationResponse(UniversalBaseModel): id: str - project_id: str = pydantic_v1.Field(alias="projectId") + project_id: typing_extensions.Annotated[str, FieldMetadata(alias="projectId")] type: BlobStorageIntegrationType 
- bucket_name: str = pydantic_v1.Field(alias="bucketName") + bucket_name: typing_extensions.Annotated[str, FieldMetadata(alias="bucketName")] endpoint: typing.Optional[str] = None region: str - access_key_id: typing.Optional[str] = pydantic_v1.Field( - alias="accessKeyId", default=None - ) + access_key_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="accessKeyId") + ] = None prefix: str - export_frequency: BlobStorageExportFrequency = pydantic_v1.Field( - alias="exportFrequency" - ) + export_frequency: typing_extensions.Annotated[ + BlobStorageExportFrequency, FieldMetadata(alias="exportFrequency") + ] enabled: bool - force_path_style: bool = pydantic_v1.Field(alias="forcePathStyle") - file_type: BlobStorageIntegrationFileType = pydantic_v1.Field(alias="fileType") - export_mode: BlobStorageExportMode = pydantic_v1.Field(alias="exportMode") - export_start_date: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="exportStartDate", default=None - ) - next_sync_at: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="nextSyncAt", default=None - ) - last_sync_at: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="lastSyncAt", default=None - ) - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") + force_path_style: typing_extensions.Annotated[ + bool, FieldMetadata(alias="forcePathStyle") + ] + file_type: typing_extensions.Annotated[ + BlobStorageIntegrationFileType, FieldMetadata(alias="fileType") + ] + export_mode: typing_extensions.Annotated[ + BlobStorageExportMode, FieldMetadata(alias="exportMode") + ] + export_start_date: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="exportStartDate") + ] = None + next_sync_at: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="nextSyncAt") + ] = None + last_sync_at: typing_extensions.Annotated[ + typing.Optional[dt.datetime], 
FieldMetadata(alias="lastSyncAt") + ] = None + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_type.py b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_type.py index 38bacbf85..0df026d37 100644 --- a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_type.py +++ b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integration_type.py @@ -1,25 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class BlobStorageIntegrationType(str, enum.Enum): - S_3 = "S3" - S_3_COMPATIBLE = "S3_COMPATIBLE" - AZURE_BLOB_STORAGE = "AZURE_BLOB_STORAGE" - - def visit( - self, - s_3: typing.Callable[[], T_Result], - s_3_compatible: typing.Callable[[], T_Result], - azure_blob_storage: typing.Callable[[], T_Result], - ) -> T_Result: - if self is BlobStorageIntegrationType.S_3: - return s_3() - if self is BlobStorageIntegrationType.S_3_COMPATIBLE: - return s_3_compatible() - if self is BlobStorageIntegrationType.AZURE_BLOB_STORAGE: - return azure_blob_storage() +BlobStorageIntegrationType = typing.Union[ + typing.Literal["S3", "S3_COMPATIBLE", "AZURE_BLOB_STORAGE"], typing.Any +] diff --git a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integrations_response.py b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integrations_response.py index c6231a23e..1a69e6dc6 100644 --- a/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integrations_response.py +++ b/langfuse/api/resources/blob_storage_integrations/types/blob_storage_integrations_response.py @@ -1,43 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .blob_storage_integration_response import BlobStorageIntegrationResponse -class BlobStorageIntegrationsResponse(pydantic_v1.BaseModel): +class BlobStorageIntegrationsResponse(UniversalBaseModel): data: typing.List[BlobStorageIntegrationResponse] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/blob_storage_integrations/types/create_blob_storage_integration_request.py b/langfuse/api/resources/blob_storage_integrations/types/create_blob_storage_integration_request.py index 31b5779c6..5c083ec88 100644 --- a/langfuse/api/resources/blob_storage_integrations/types/create_blob_storage_integration_request.py +++ 
b/langfuse/api/resources/blob_storage_integrations/types/create_blob_storage_integration_request.py @@ -3,106 +3,96 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .blob_storage_export_frequency import BlobStorageExportFrequency from .blob_storage_export_mode import BlobStorageExportMode from .blob_storage_integration_file_type import BlobStorageIntegrationFileType from .blob_storage_integration_type import BlobStorageIntegrationType -class CreateBlobStorageIntegrationRequest(pydantic_v1.BaseModel): - project_id: str = pydantic_v1.Field(alias="projectId") +class CreateBlobStorageIntegrationRequest(UniversalBaseModel): + project_id: typing_extensions.Annotated[str, FieldMetadata(alias="projectId")] = ( + pydantic.Field() + ) """ ID of the project in which to configure the blob storage integration """ type: BlobStorageIntegrationType - bucket_name: str = pydantic_v1.Field(alias="bucketName") + bucket_name: typing_extensions.Annotated[str, FieldMetadata(alias="bucketName")] = ( + pydantic.Field() + ) """ Name of the storage bucket """ - endpoint: typing.Optional[str] = pydantic_v1.Field(default=None) + endpoint: typing.Optional[str] = pydantic.Field(default=None) """ Custom endpoint URL (required for S3_COMPATIBLE type) """ - region: str = pydantic_v1.Field() + region: str = pydantic.Field() """ Storage region """ - access_key_id: typing.Optional[str] = pydantic_v1.Field( - alias="accessKeyId", default=None - ) + access_key_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="accessKeyId") + ] = pydantic.Field(default=None) """ Access key ID for authentication """ - secret_access_key: typing.Optional[str] = pydantic_v1.Field( - 
alias="secretAccessKey", default=None - ) + secret_access_key: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="secretAccessKey") + ] = pydantic.Field(default=None) """ Secret access key for authentication (will be encrypted when stored) """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ Path prefix for exported files (must end with forward slash if provided) """ - export_frequency: BlobStorageExportFrequency = pydantic_v1.Field( - alias="exportFrequency" - ) - enabled: bool = pydantic_v1.Field() + export_frequency: typing_extensions.Annotated[ + BlobStorageExportFrequency, FieldMetadata(alias="exportFrequency") + ] + enabled: bool = pydantic.Field() """ Whether the integration is active """ - force_path_style: bool = pydantic_v1.Field(alias="forcePathStyle") + force_path_style: typing_extensions.Annotated[ + bool, FieldMetadata(alias="forcePathStyle") + ] = pydantic.Field() """ Use path-style URLs for S3 requests """ - file_type: BlobStorageIntegrationFileType = pydantic_v1.Field(alias="fileType") - export_mode: BlobStorageExportMode = pydantic_v1.Field(alias="exportMode") - export_start_date: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="exportStartDate", default=None - ) + file_type: typing_extensions.Annotated[ + BlobStorageIntegrationFileType, FieldMetadata(alias="fileType") + ] + export_mode: typing_extensions.Annotated[ + BlobStorageExportMode, FieldMetadata(alias="exportMode") + ] + export_start_date: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="exportStartDate") + ] = pydantic.Field(default=None) """ Custom start date for exports (required when exportMode is FROM_CUSTOM_DATE) """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: 
typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/comments/__init__.py b/langfuse/api/resources/comments/__init__.py index e40c8546f..0588586c7 100644 --- a/langfuse/api/resources/comments/__init__.py +++ b/langfuse/api/resources/comments/__init__.py @@ -1,5 +1,44 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import CreateCommentRequest, CreateCommentResponse, GetCommentsResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import CreateCommentRequest, CreateCommentResponse, GetCommentsResponse +_dynamic_imports: typing.Dict[str, str] = { + "CreateCommentRequest": ".types", + "CreateCommentResponse": ".types", + "GetCommentsResponse": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["CreateCommentRequest", "CreateCommentResponse", "GetCommentsResponse"] diff --git a/langfuse/api/resources/comments/client.py b/langfuse/api/resources/comments/client.py index 9c78ca23f..a7b9915dc 100644 --- a/langfuse/api/resources/comments/client.py +++ b/langfuse/api/resources/comments/client.py @@ -1,19 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..commons.types.comment import Comment +from .raw_client import AsyncRawCommentsClient, RawCommentsClient from .types.create_comment_request import CreateCommentRequest from .types.create_comment_response import CreateCommentResponse from .types.get_comments_response import GetCommentsResponse @@ -24,7 +16,18 @@ class CommentsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawCommentsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawCommentsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawCommentsClient + """ + return self._raw_client def create( self, @@ -48,8 +51,8 @@ def create( Examples -------- - from langfuse import CreateCommentRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.comments import CreateCommentRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -68,38 +71,10 @@ def create( ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/comments", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreateCommentResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get( self, @@ -140,7 +115,7 @@ def get( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -152,43 +127,15 @@ def get( ) 
client.comments.get() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/comments", - method="GET", - params={ - "page": page, - "limit": limit, - "objectType": object_type, - "objectId": object_id, - "authorUserId": author_user_id, - }, + _response = self._raw_client.get( + page=page, + limit=limit, + object_type=object_type, + object_id=object_id, + author_user_id=author_user_id, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetCommentsResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_by_id( self, @@ -213,7 +160,7 @@ def get_by_id( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -227,41 +174,26 @@ def get_by_id( comment_id="commentId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/comments/{jsonable_encoder(comment_id)}", - method="GET", - request_options=request_options, + _response = self._raw_client.get_by_id( + comment_id, 
request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Comment, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncCommentsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawCommentsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawCommentsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawCommentsClient + """ + return self._raw_client async def create( self, @@ -287,8 +219,8 @@ async def create( -------- import asyncio - from langfuse import CreateCommentRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.comments import CreateCommentRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -313,38 +245,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/comments", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreateCommentResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get( self, @@ -387,7 +291,7 @@ async def get( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = 
AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -405,43 +309,15 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/comments", - method="GET", - params={ - "page": page, - "limit": limit, - "objectType": object_type, - "objectId": object_id, - "authorUserId": author_user_id, - }, + _response = await self._raw_client.get( + page=page, + limit=limit, + object_type=object_type, + object_id=object_id, + author_user_id=author_user_id, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetCommentsResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_by_id( self, @@ -468,7 +344,7 @@ async def get_by_id( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -488,33 +364,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( 
- f"api/public/comments/{jsonable_encoder(comment_id)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_by_id( + comment_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Comment, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/comments/raw_client.py b/langfuse/api/resources/comments/raw_client.py new file mode 100644 index 000000000..443327345 --- /dev/null +++ b/langfuse/api/resources/comments/raw_client.py @@ -0,0 +1,710 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..commons.types.comment import Comment +from .types.create_comment_request import CreateCommentRequest +from .types.create_comment_response import CreateCommentResponse +from .types.get_comments_response import GetCommentsResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawCommentsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def create( + self, + *, + request: CreateCommentRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[CreateCommentResponse]: + """ + Create a comment. Comments may be attached to different object types (trace, observation, session, prompt). + + Parameters + ---------- + request : CreateCommentRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[CreateCommentResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/comments", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateCommentRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + CreateCommentResponse, + parse_obj_as( + type_=CreateCommentResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + 
def get( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + object_type: typing.Optional[str] = None, + object_id: typing.Optional[str] = None, + author_user_id: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GetCommentsResponse]: + """ + Get all comments + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1. + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit + + object_type : typing.Optional[str] + Filter comments by object type (trace, observation, session, prompt). + + object_id : typing.Optional[str] + Filter comments by object id. If objectType is not provided, an error will be thrown. + + author_user_id : typing.Optional[str] + Filter comments by author user id. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GetCommentsResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/comments", + method="GET", + params={ + "page": page, + "limit": limit, + "objectType": object_type, + "objectId": object_id, + "authorUserId": author_user_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GetCommentsResponse, + parse_obj_as( + type_=GetCommentsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + 
), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_by_id( + self, + comment_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Comment]: + """ + Get a comment by id + + Parameters + ---------- + comment_id : str + The unique langfuse identifier of a comment + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Comment] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/comments/{jsonable_encoder(comment_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawCommentsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def 
create( + self, + *, + request: CreateCommentRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[CreateCommentResponse]: + """ + Create a comment. Comments may be attached to different object types (trace, observation, session, prompt). + + Parameters + ---------- + request : CreateCommentRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[CreateCommentResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/comments", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateCommentRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + CreateCommentResponse, + parse_obj_as( + type_=CreateCommentResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + 
headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + object_type: typing.Optional[str] = None, + object_id: typing.Optional[str] = None, + author_user_id: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GetCommentsResponse]: + """ + Get all comments + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1. + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit + + object_type : typing.Optional[str] + Filter comments by object type (trace, observation, session, prompt). + + object_id : typing.Optional[str] + Filter comments by object id. If objectType is not provided, an error will be thrown. + + author_user_id : typing.Optional[str] + Filter comments by author user id. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[GetCommentsResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/comments", + method="GET", + params={ + "page": page, + "limit": limit, + "objectType": object_type, + "objectId": object_id, + "authorUserId": author_user_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GetCommentsResponse, + parse_obj_as( + type_=GetCommentsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, 
+ ) + + async def get_by_id( + self, + comment_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Comment]: + """ + Get a comment by id + + Parameters + ---------- + comment_id : str + The unique langfuse identifier of a comment + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Comment] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/comments/{jsonable_encoder(comment_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/comments/types/__init__.py b/langfuse/api/resources/comments/types/__init__.py index 13dc1d8d9..4936025a0 100644 --- a/langfuse/api/resources/comments/types/__init__.py +++ b/langfuse/api/resources/comments/types/__init__.py @@ -1,7 +1,46 @@ # This file was auto-generated by Fern from our API Definition. -from .create_comment_request import CreateCommentRequest -from .create_comment_response import CreateCommentResponse -from .get_comments_response import GetCommentsResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .create_comment_request import CreateCommentRequest + from .create_comment_response import CreateCommentResponse + from .get_comments_response import GetCommentsResponse +_dynamic_imports: typing.Dict[str, str] = { + "CreateCommentRequest": ".create_comment_request", + "CreateCommentResponse": ".create_comment_response", + "GetCommentsResponse": ".get_comments_response", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["CreateCommentRequest", 
"CreateCommentResponse", "GetCommentsResponse"] diff --git a/langfuse/api/resources/comments/types/create_comment_request.py b/langfuse/api/resources/comments/types/create_comment_request.py index 3c35c64e2..f31d477f2 100644 --- a/langfuse/api/resources/comments/types/create_comment_request.py +++ b/langfuse/api/resources/comments/types/create_comment_request.py @@ -1,69 +1,54 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class CreateCommentRequest(pydantic_v1.BaseModel): - project_id: str = pydantic_v1.Field(alias="projectId") +class CreateCommentRequest(UniversalBaseModel): + project_id: typing_extensions.Annotated[str, FieldMetadata(alias="projectId")] = ( + pydantic.Field() + ) """ The id of the project to attach the comment to. """ - object_type: str = pydantic_v1.Field(alias="objectType") + object_type: typing_extensions.Annotated[str, FieldMetadata(alias="objectType")] = ( + pydantic.Field() + ) """ The type of the object to attach the comment to (trace, observation, session, prompt). """ - object_id: str = pydantic_v1.Field(alias="objectId") + object_id: typing_extensions.Annotated[str, FieldMetadata(alias="objectId")] = ( + pydantic.Field() + ) """ The id of the object to attach the comment to. If this does not reference a valid existing object, an error will be thrown. """ - content: str = pydantic_v1.Field() + content: str = pydantic.Field() """ The content of the comment. May include markdown. Currently limited to 5000 characters. 
""" - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) + author_user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="authorUserId") + ] = pydantic.Field(default=None) """ The id of the user who created the comment. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/comments/types/create_comment_response.py b/langfuse/api/resources/comments/types/create_comment_response.py index d7708f798..1081d75b9 100644 --- a/langfuse/api/resources/comments/types/create_comment_response.py +++ b/langfuse/api/resources/comments/types/create_comment_response.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class CreateCommentResponse(pydantic_v1.BaseModel): - id: str = pydantic_v1.Field() +class CreateCommentResponse(UniversalBaseModel): + id: str = pydantic.Field() """ The id of the created object in Langfuse """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/comments/types/get_comments_response.py b/langfuse/api/resources/comments/types/get_comments_response.py index 66a8b9527..beb9bf6d6 100644 --- a/langfuse/api/resources/comments/types/get_comments_response.py +++ b/langfuse/api/resources/comments/types/get_comments_response.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.comment import Comment from ...utils.resources.pagination.types.meta_response import MetaResponse -class GetCommentsResponse(pydantic_v1.BaseModel): +class GetCommentsResponse(UniversalBaseModel): data: typing.List[Comment] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/__init__.py b/langfuse/api/resources/commons/__init__.py index 6dfbecafe..180f3b14e 100644 --- a/langfuse/api/resources/commons/__init__.py +++ b/langfuse/api/resources/commons/__init__.py @@ -1,56 +1,133 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import ( - BaseScore, - BaseScoreV1, - BooleanScore, - BooleanScoreV1, - CategoricalScore, - CategoricalScoreV1, - Comment, - CommentObjectType, - ConfigCategory, - CreateScoreValue, - Dataset, - DatasetItem, - DatasetRun, - DatasetRunItem, - DatasetRunWithItems, - DatasetStatus, - MapValue, - Model, - ModelPrice, - ModelUsageUnit, - NumericScore, - NumericScoreV1, - Observation, - ObservationLevel, - ObservationsView, - Score, - ScoreConfig, - ScoreDataType, - ScoreSource, - ScoreV1, - ScoreV1_Boolean, - ScoreV1_Categorical, - ScoreV1_Numeric, - Score_Boolean, - Score_Categorical, - Score_Numeric, - Session, - SessionWithTraces, - Trace, - TraceWithDetails, - TraceWithFullDetails, - Usage, -) -from .errors import ( - AccessDeniedError, - Error, - MethodNotAllowedError, - NotFoundError, - UnauthorizedError, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + BaseScore, + BaseScoreV1, + BooleanScore, + BooleanScoreV1, + CategoricalScore, + CategoricalScoreV1, + Comment, + CommentObjectType, + ConfigCategory, + CreateScoreValue, + Dataset, + DatasetItem, + DatasetRun, + DatasetRunItem, + DatasetRunWithItems, + DatasetStatus, + MapValue, + Model, + ModelPrice, + ModelUsageUnit, + NumericScore, + NumericScoreV1, + Observation, + ObservationLevel, + ObservationsView, + Score, + ScoreConfig, + ScoreDataType, + ScoreSource, + ScoreV1, + Score_Boolean, + Score_Categorical, + Score_Numeric, + Session, + SessionWithTraces, + Trace, + TraceWithDetails, + TraceWithFullDetails, + Usage, + ) + from .errors import ( + AccessDeniedError, + Error, + MethodNotAllowedError, + NotFoundError, + UnauthorizedError, + ) +_dynamic_imports: typing.Dict[str, str] = { + "AccessDeniedError": ".errors", + "BaseScore": ".types", + "BaseScoreV1": ".types", + "BooleanScore": ".types", + "BooleanScoreV1": ".types", + "CategoricalScore": ".types", + "CategoricalScoreV1": ".types", + "Comment": ".types", + 
"CommentObjectType": ".types", + "ConfigCategory": ".types", + "CreateScoreValue": ".types", + "Dataset": ".types", + "DatasetItem": ".types", + "DatasetRun": ".types", + "DatasetRunItem": ".types", + "DatasetRunWithItems": ".types", + "DatasetStatus": ".types", + "Error": ".errors", + "MapValue": ".types", + "MethodNotAllowedError": ".errors", + "Model": ".types", + "ModelPrice": ".types", + "ModelUsageUnit": ".types", + "NotFoundError": ".errors", + "NumericScore": ".types", + "NumericScoreV1": ".types", + "Observation": ".types", + "ObservationLevel": ".types", + "ObservationsView": ".types", + "Score": ".types", + "ScoreConfig": ".types", + "ScoreDataType": ".types", + "ScoreSource": ".types", + "ScoreV1": ".types", + "Score_Boolean": ".types", + "Score_Categorical": ".types", + "Score_Numeric": ".types", + "Session": ".types", + "SessionWithTraces": ".types", + "Trace": ".types", + "TraceWithDetails": ".types", + "TraceWithFullDetails": ".types", + "UnauthorizedError": ".errors", + "Usage": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "AccessDeniedError", @@ -87,9 +164,6 @@ "ScoreDataType", "ScoreSource", "ScoreV1", - "ScoreV1_Boolean", - "ScoreV1_Categorical", - "ScoreV1_Numeric", "Score_Boolean", "Score_Categorical", "Score_Numeric", diff --git 
a/langfuse/api/resources/commons/errors/__init__.py b/langfuse/api/resources/commons/errors/__init__.py index 0aef2f92f..c633139f0 100644 --- a/langfuse/api/resources/commons/errors/__init__.py +++ b/langfuse/api/resources/commons/errors/__init__.py @@ -1,10 +1,51 @@ # This file was auto-generated by Fern from our API Definition. -from .access_denied_error import AccessDeniedError -from .error import Error -from .method_not_allowed_error import MethodNotAllowedError -from .not_found_error import NotFoundError -from .unauthorized_error import UnauthorizedError +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .access_denied_error import AccessDeniedError + from .error import Error + from .method_not_allowed_error import MethodNotAllowedError + from .not_found_error import NotFoundError + from .unauthorized_error import UnauthorizedError +_dynamic_imports: typing.Dict[str, str] = { + "AccessDeniedError": ".access_denied_error", + "Error": ".error", + "MethodNotAllowedError": ".method_not_allowed_error", + "NotFoundError": ".not_found_error", + "UnauthorizedError": ".unauthorized_error", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "AccessDeniedError", diff --git a/langfuse/api/resources/commons/errors/access_denied_error.py 
b/langfuse/api/resources/commons/errors/access_denied_error.py index 9114ba9ac..8798481e7 100644 --- a/langfuse/api/resources/commons/errors/access_denied_error.py +++ b/langfuse/api/resources/commons/errors/access_denied_error.py @@ -6,5 +6,7 @@ class AccessDeniedError(ApiError): - def __init__(self, body: typing.Any): - super().__init__(status_code=403, body=body) + def __init__( + self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None + ): + super().__init__(status_code=403, headers=headers, body=body) diff --git a/langfuse/api/resources/commons/errors/error.py b/langfuse/api/resources/commons/errors/error.py index 06020120c..351b9183e 100644 --- a/langfuse/api/resources/commons/errors/error.py +++ b/langfuse/api/resources/commons/errors/error.py @@ -6,5 +6,7 @@ class Error(ApiError): - def __init__(self, body: typing.Any): - super().__init__(status_code=400, body=body) + def __init__( + self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None + ): + super().__init__(status_code=400, headers=headers, body=body) diff --git a/langfuse/api/resources/commons/errors/method_not_allowed_error.py b/langfuse/api/resources/commons/errors/method_not_allowed_error.py index 32731a5c7..e0bfc8581 100644 --- a/langfuse/api/resources/commons/errors/method_not_allowed_error.py +++ b/langfuse/api/resources/commons/errors/method_not_allowed_error.py @@ -6,5 +6,7 @@ class MethodNotAllowedError(ApiError): - def __init__(self, body: typing.Any): - super().__init__(status_code=405, body=body) + def __init__( + self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None + ): + super().__init__(status_code=405, headers=headers, body=body) diff --git a/langfuse/api/resources/commons/errors/not_found_error.py b/langfuse/api/resources/commons/errors/not_found_error.py index 564ffca2c..2eb1f991b 100644 --- a/langfuse/api/resources/commons/errors/not_found_error.py +++ b/langfuse/api/resources/commons/errors/not_found_error.py 
@@ -6,5 +6,7 @@ class NotFoundError(ApiError): - def __init__(self, body: typing.Any): - super().__init__(status_code=404, body=body) + def __init__( + self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None + ): + super().__init__(status_code=404, headers=headers, body=body) diff --git a/langfuse/api/resources/commons/errors/unauthorized_error.py b/langfuse/api/resources/commons/errors/unauthorized_error.py index 2997f54f6..458171246 100644 --- a/langfuse/api/resources/commons/errors/unauthorized_error.py +++ b/langfuse/api/resources/commons/errors/unauthorized_error.py @@ -6,5 +6,7 @@ class UnauthorizedError(ApiError): - def __init__(self, body: typing.Any): - super().__init__(status_code=401, body=body) + def __init__( + self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None + ): + super().__init__(status_code=401, headers=headers, body=body) diff --git a/langfuse/api/resources/commons/types/__init__.py b/langfuse/api/resources/commons/types/__init__.py index 1c0d06a8d..3c54ca382 100644 --- a/langfuse/api/resources/commons/types/__init__.py +++ b/langfuse/api/resources/commons/types/__init__.py @@ -1,41 +1,116 @@ # This file was auto-generated by Fern from our API Definition. 
-from .base_score import BaseScore -from .base_score_v_1 import BaseScoreV1 -from .boolean_score import BooleanScore -from .boolean_score_v_1 import BooleanScoreV1 -from .categorical_score import CategoricalScore -from .categorical_score_v_1 import CategoricalScoreV1 -from .comment import Comment -from .comment_object_type import CommentObjectType -from .config_category import ConfigCategory -from .create_score_value import CreateScoreValue -from .dataset import Dataset -from .dataset_item import DatasetItem -from .dataset_run import DatasetRun -from .dataset_run_item import DatasetRunItem -from .dataset_run_with_items import DatasetRunWithItems -from .dataset_status import DatasetStatus -from .map_value import MapValue -from .model import Model -from .model_price import ModelPrice -from .model_usage_unit import ModelUsageUnit -from .numeric_score import NumericScore -from .numeric_score_v_1 import NumericScoreV1 -from .observation import Observation -from .observation_level import ObservationLevel -from .observations_view import ObservationsView -from .score import Score, Score_Boolean, Score_Categorical, Score_Numeric -from .score_config import ScoreConfig -from .score_data_type import ScoreDataType -from .score_source import ScoreSource -from .score_v_1 import ScoreV1, ScoreV1_Boolean, ScoreV1_Categorical, ScoreV1_Numeric -from .session import Session -from .session_with_traces import SessionWithTraces -from .trace import Trace -from .trace_with_details import TraceWithDetails -from .trace_with_full_details import TraceWithFullDetails -from .usage import Usage +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .base_score import BaseScore + from .base_score_v_1 import BaseScoreV1 + from .boolean_score import BooleanScore + from .boolean_score_v_1 import BooleanScoreV1 + from .categorical_score import CategoricalScore + from .categorical_score_v_1 import CategoricalScoreV1 + from .comment import Comment + 
from .comment_object_type import CommentObjectType + from .config_category import ConfigCategory + from .create_score_value import CreateScoreValue + from .dataset import Dataset + from .dataset_item import DatasetItem + from .dataset_run import DatasetRun + from .dataset_run_item import DatasetRunItem + from .dataset_run_with_items import DatasetRunWithItems + from .dataset_status import DatasetStatus + from .map_value import MapValue + from .model import Model + from .model_price import ModelPrice + from .model_usage_unit import ModelUsageUnit + from .numeric_score import NumericScore + from .numeric_score_v_1 import NumericScoreV1 + from .observation import Observation + from .observation_level import ObservationLevel + from .observations_view import ObservationsView + from .score import Score, Score_Boolean, Score_Categorical, Score_Numeric + from .score_config import ScoreConfig + from .score_data_type import ScoreDataType + from .score_source import ScoreSource + from .score_v_1 import ScoreV1 + from .session import Session + from .session_with_traces import SessionWithTraces + from .trace import Trace + from .trace_with_details import TraceWithDetails + from .trace_with_full_details import TraceWithFullDetails + from .usage import Usage +_dynamic_imports: typing.Dict[str, str] = { + "BaseScore": ".base_score", + "BaseScoreV1": ".base_score_v_1", + "BooleanScore": ".boolean_score", + "BooleanScoreV1": ".boolean_score_v_1", + "CategoricalScore": ".categorical_score", + "CategoricalScoreV1": ".categorical_score_v_1", + "Comment": ".comment", + "CommentObjectType": ".comment_object_type", + "ConfigCategory": ".config_category", + "CreateScoreValue": ".create_score_value", + "Dataset": ".dataset", + "DatasetItem": ".dataset_item", + "DatasetRun": ".dataset_run", + "DatasetRunItem": ".dataset_run_item", + "DatasetRunWithItems": ".dataset_run_with_items", + "DatasetStatus": ".dataset_status", + "MapValue": ".map_value", + "Model": ".model", + "ModelPrice": 
".model_price", + "ModelUsageUnit": ".model_usage_unit", + "NumericScore": ".numeric_score", + "NumericScoreV1": ".numeric_score_v_1", + "Observation": ".observation", + "ObservationLevel": ".observation_level", + "ObservationsView": ".observations_view", + "Score": ".score", + "ScoreConfig": ".score_config", + "ScoreDataType": ".score_data_type", + "ScoreSource": ".score_source", + "ScoreV1": ".score_v_1", + "Score_Boolean": ".score", + "Score_Categorical": ".score", + "Score_Numeric": ".score", + "Session": ".session", + "SessionWithTraces": ".session_with_traces", + "Trace": ".trace", + "TraceWithDetails": ".trace_with_details", + "TraceWithFullDetails": ".trace_with_full_details", + "Usage": ".usage", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "BaseScore", @@ -68,9 +143,6 @@ "ScoreDataType", "ScoreSource", "ScoreV1", - "ScoreV1_Boolean", - "ScoreV1_Categorical", - "ScoreV1_Numeric", "Score_Boolean", "Score_Categorical", "Score_Numeric", diff --git a/langfuse/api/resources/commons/types/base_score.py b/langfuse/api/resources/commons/types/base_score.py index dd5449c83..5478d45e3 100644 --- a/langfuse/api/resources/commons/types/base_score.py +++ b/langfuse/api/resources/commons/types/base_score.py @@ -3,77 +3,67 @@ import datetime as dt import typing -from ....core.datetime_utils import 
serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .score_source import ScoreSource -class BaseScore(pydantic_v1.BaseModel): +class BaseScore(UniversalBaseModel): id: str - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) - session_id: typing.Optional[str] = pydantic_v1.Field( - alias="sessionId", default=None - ) - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - dataset_run_id: typing.Optional[str] = pydantic_v1.Field( - alias="datasetRunId", default=None - ) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = None + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + dataset_run_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="datasetRunId") + ] = None name: str source: ScoreSource timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + author_user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="authorUserId") + ] = None comment: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) + 
config_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="configId") + ] = pydantic.Field(default=None) """ Reference a score config on a score. When set, config and score name must be equal and value must comply to optionally defined numerical range """ - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) + queue_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="queueId") + ] = pydantic.Field(default=None) """ The annotation queue referenced by the score. Indicates if score was initially created while processing annotation queue. """ - environment: typing.Optional[str] = pydantic_v1.Field(default=None) + environment: typing.Optional[str] = pydantic.Field(default=None) """ The environment from which this score originated. Can be any lowercase alphanumeric string with hyphens and underscores that does not start with 'langfuse'. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + 
extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/base_score_v_1.py b/langfuse/api/resources/commons/types/base_score_v_1.py index 478dcc6e6..97ba18fba 100644 --- a/langfuse/api/resources/commons/types/base_score_v_1.py +++ b/langfuse/api/resources/commons/types/base_score_v_1.py @@ -3,71 +3,59 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .score_source import ScoreSource -class BaseScoreV1(pydantic_v1.BaseModel): +class BaseScoreV1(UniversalBaseModel): id: str - trace_id: str = pydantic_v1.Field(alias="traceId") + trace_id: typing_extensions.Annotated[str, FieldMetadata(alias="traceId")] name: str source: ScoreSource - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + author_user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="authorUserId") + ] = None comment: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) + config_id: typing_extensions.Annotated[ + typing.Optional[str], 
FieldMetadata(alias="configId") + ] = pydantic.Field(default=None) """ Reference a score config on a score. When set, config and score name must be equal and value must comply to optionally defined numerical range """ - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) + queue_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="queueId") + ] = pydantic.Field(default=None) """ The annotation queue referenced by the score. Indicates if score was initially created while processing annotation queue. """ - environment: typing.Optional[str] = pydantic_v1.Field(default=None) + environment: typing.Optional[str] = pydantic.Field(default=None) """ The environment from which this score originated. Can be any lowercase alphanumeric string with hyphens and underscores that does not start with 'langfuse'. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git 
a/langfuse/api/resources/commons/types/boolean_score.py b/langfuse/api/resources/commons/types/boolean_score.py index d838b7db9..b7a7f2215 100644 --- a/langfuse/api/resources/commons/types/boolean_score.py +++ b/langfuse/api/resources/commons/types/boolean_score.py @@ -1,53 +1,34 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from .base_score import BaseScore class BooleanScore(BaseScore): - value: float = pydantic_v1.Field() + value: float = pydantic.Field() """ The numeric value of the score. Equals 1 for "True" and 0 for "False" """ - string_value: str = pydantic_v1.Field(alias="stringValue") + string_value: typing_extensions.Annotated[ + str, FieldMetadata(alias="stringValue") + ] = pydantic.Field() """ The string representation of the score value. 
Is inferred from the numeric value and equals "True" or "False" """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/boolean_score_v_1.py b/langfuse/api/resources/commons/types/boolean_score_v_1.py index 9f8e8935f..310f91d3b 100644 --- a/langfuse/api/resources/commons/types/boolean_score_v_1.py +++ b/langfuse/api/resources/commons/types/boolean_score_v_1.py @@ -1,53 +1,34 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from .base_score_v_1 import BaseScoreV1 class BooleanScoreV1(BaseScoreV1): - value: float = pydantic_v1.Field() + value: float = pydantic.Field() """ The numeric value of the score. Equals 1 for "True" and 0 for "False" """ - string_value: str = pydantic_v1.Field(alias="stringValue") + string_value: typing_extensions.Annotated[ + str, FieldMetadata(alias="stringValue") + ] = pydantic.Field() """ The string representation of the score value. Is inferred from the numeric value and equals "True" or "False" """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git 
a/langfuse/api/resources/commons/types/categorical_score.py b/langfuse/api/resources/commons/types/categorical_score.py index 363ed03ff..ada0224ce 100644 --- a/langfuse/api/resources/commons/types/categorical_score.py +++ b/langfuse/api/resources/commons/types/categorical_score.py @@ -1,53 +1,34 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from .base_score import BaseScore class CategoricalScore(BaseScore): - value: float = pydantic_v1.Field() + value: float = pydantic.Field() """ Represents the numeric category mapping of the stringValue. If no config is linked, defaults to 0. """ - string_value: str = pydantic_v1.Field(alias="stringValue") + string_value: typing_extensions.Annotated[ + str, FieldMetadata(alias="stringValue") + ] = pydantic.Field() """ The string representation of the score value. If no config is linked, can be any string. 
Otherwise, must map to a config category """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/categorical_score_v_1.py b/langfuse/api/resources/commons/types/categorical_score_v_1.py index 2aa42d586..67e280286 100644 --- a/langfuse/api/resources/commons/types/categorical_score_v_1.py +++ b/langfuse/api/resources/commons/types/categorical_score_v_1.py @@ -1,53 +1,34 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from .base_score_v_1 import BaseScoreV1 class CategoricalScoreV1(BaseScoreV1): - value: float = pydantic_v1.Field() + value: float = pydantic.Field() """ Represents the numeric category mapping of the stringValue. If no config is linked, defaults to 0. """ - string_value: str = pydantic_v1.Field(alias="stringValue") + string_value: typing_extensions.Annotated[ + str, FieldMetadata(alias="stringValue") + ] = pydantic.Field() """ The string representation of the score value. If no config is linked, can be any string. Otherwise, must map to a config category """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git 
a/langfuse/api/resources/commons/types/comment.py b/langfuse/api/resources/commons/types/comment.py index 4d8b1916a..77bb977ee 100644 --- a/langfuse/api/resources/commons/types/comment.py +++ b/langfuse/api/resources/commons/types/comment.py @@ -3,52 +3,38 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .comment_object_type import CommentObjectType -class Comment(pydantic_v1.BaseModel): +class Comment(UniversalBaseModel): id: str - project_id: str = pydantic_v1.Field(alias="projectId") - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - object_type: CommentObjectType = pydantic_v1.Field(alias="objectType") - object_id: str = pydantic_v1.Field(alias="objectId") + project_id: typing_extensions.Annotated[str, FieldMetadata(alias="projectId")] + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + object_type: typing_extensions.Annotated[ + CommentObjectType, FieldMetadata(alias="objectType") + ] + object_id: typing_extensions.Annotated[str, FieldMetadata(alias="objectId")] content: str - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } 
- kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + author_user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="authorUserId") + ] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/comment_object_type.py b/langfuse/api/resources/commons/types/comment_object_type.py index 9c6c134c6..9677c4293 100644 --- a/langfuse/api/resources/commons/types/comment_object_type.py +++ b/langfuse/api/resources/commons/types/comment_object_type.py @@ -1,29 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class CommentObjectType(str, enum.Enum): - TRACE = "TRACE" - OBSERVATION = "OBSERVATION" - SESSION = "SESSION" - PROMPT = "PROMPT" - - def visit( - self, - trace: typing.Callable[[], T_Result], - observation: typing.Callable[[], T_Result], - session: typing.Callable[[], T_Result], - prompt: typing.Callable[[], T_Result], - ) -> T_Result: - if self is CommentObjectType.TRACE: - return trace() - if self is CommentObjectType.OBSERVATION: - return observation() - if self is CommentObjectType.SESSION: - return session() - if self is CommentObjectType.PROMPT: - return prompt() +CommentObjectType = typing.Union[ + typing.Literal["TRACE", "OBSERVATION", "SESSION", "PROMPT"], typing.Any +] diff --git a/langfuse/api/resources/commons/types/config_category.py b/langfuse/api/resources/commons/types/config_category.py index b1cbde9f2..7d3050eaf 100644 --- a/langfuse/api/resources/commons/types/config_category.py +++ b/langfuse/api/resources/commons/types/config_category.py @@ -1,43 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ConfigCategory(pydantic_v1.BaseModel): +class ConfigCategory(UniversalBaseModel): value: float label: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/dataset.py b/langfuse/api/resources/commons/types/dataset.py index 116bff135..5a2726552 100644 --- a/langfuse/api/resources/commons/types/dataset.py +++ b/langfuse/api/resources/commons/types/dataset.py @@ -3,62 +3,46 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from 
....core.serialization import FieldMetadata -class Dataset(pydantic_v1.BaseModel): +class Dataset(UniversalBaseModel): id: str name: str description: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - input_schema: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="inputSchema", default=None - ) + input_schema: typing_extensions.Annotated[ + typing.Optional[typing.Any], FieldMetadata(alias="inputSchema") + ] = pydantic.Field(default=None) """ JSON Schema for validating dataset item inputs """ - expected_output_schema: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="expectedOutputSchema", default=None - ) + expected_output_schema: typing_extensions.Annotated[ + typing.Optional[typing.Any], FieldMetadata(alias="expectedOutputSchema") + ] = pydantic.Field(default=None) """ JSON Schema for validating dataset item expected outputs """ - project_id: str = pydantic_v1.Field(alias="projectId") - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + project_id: typing_extensions.Annotated[str, 
FieldMetadata(alias="projectId")] + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/dataset_item.py b/langfuse/api/resources/commons/types/dataset_item.py index dd5f85e78..7954ec3cb 100644 --- a/langfuse/api/resources/commons/types/dataset_item.py +++ b/langfuse/api/resources/commons/types/dataset_item.py @@ -3,59 +3,43 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .dataset_status import DatasetStatus -class DatasetItem(pydantic_v1.BaseModel): +class DatasetItem(UniversalBaseModel): id: str status: DatasetStatus input: typing.Optional[typing.Any] = None - expected_output: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="expectedOutput", default=None - ) + expected_output: typing_extensions.Annotated[ + typing.Optional[typing.Any], FieldMetadata(alias="expectedOutput") + ] = None metadata: typing.Optional[typing.Any] = None - source_trace_id: typing.Optional[str] = pydantic_v1.Field( - alias="sourceTraceId", default=None - ) - source_observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="sourceObservationId", default=None - ) - dataset_id: str = pydantic_v1.Field(alias="datasetId") - dataset_name: str = pydantic_v1.Field(alias="datasetName") - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - 
updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + source_trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sourceTraceId") + ] = None + source_observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sourceObservationId") + ] = None + dataset_id: typing_extensions.Annotated[str, FieldMetadata(alias="datasetId")] + dataset_name: typing_extensions.Annotated[str, FieldMetadata(alias="datasetName")] + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/dataset_run.py b/langfuse/api/resources/commons/types/dataset_run.py index 74b1a2ac8..e9f6f4458 100644 --- a/langfuse/api/resources/commons/types/dataset_run.py +++ 
b/langfuse/api/resources/commons/types/dataset_run.py @@ -3,80 +3,68 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class DatasetRun(pydantic_v1.BaseModel): - id: str = pydantic_v1.Field() +class DatasetRun(UniversalBaseModel): + id: str = pydantic.Field() """ Unique identifier of the dataset run """ - name: str = pydantic_v1.Field() + name: str = pydantic.Field() """ Name of the dataset run """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Description of the run """ - metadata: typing.Optional[typing.Any] = pydantic_v1.Field(default=None) + metadata: typing.Optional[typing.Any] = pydantic.Field(default=None) """ Metadata of the dataset run """ - dataset_id: str = pydantic_v1.Field(alias="datasetId") + dataset_id: typing_extensions.Annotated[str, FieldMetadata(alias="datasetId")] = ( + pydantic.Field() + ) """ Id of the associated dataset """ - dataset_name: str = pydantic_v1.Field(alias="datasetName") + dataset_name: typing_extensions.Annotated[ + str, FieldMetadata(alias="datasetName") + ] = pydantic.Field() """ Name of the associated dataset """ - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] = pydantic.Field() """ The date and time when the dataset run was created """ - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] = pydantic.Field() """ The date and time when the dataset run was last updated """ - def json(self, **kwargs: typing.Any) -> str: - 
kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/dataset_run_item.py b/langfuse/api/resources/commons/types/dataset_run_item.py index f1b3af163..6980cc087 100644 --- a/langfuse/api/resources/commons/types/dataset_run_item.py +++ b/langfuse/api/resources/commons/types/dataset_run_item.py @@ -3,51 +3,41 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class DatasetRunItem(pydantic_v1.BaseModel): +class DatasetRunItem(UniversalBaseModel): id: str - dataset_run_id: str = pydantic_v1.Field(alias="datasetRunId") - dataset_run_name: str = pydantic_v1.Field(alias="datasetRunName") - dataset_item_id: str = pydantic_v1.Field(alias="datasetItemId") - 
trace_id: str = pydantic_v1.Field(alias="traceId") - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + dataset_run_id: typing_extensions.Annotated[ + str, FieldMetadata(alias="datasetRunId") + ] + dataset_run_name: typing_extensions.Annotated[ + str, FieldMetadata(alias="datasetRunName") + ] + dataset_item_id: typing_extensions.Annotated[ + str, FieldMetadata(alias="datasetItemId") + ] + trace_id: typing_extensions.Annotated[str, FieldMetadata(alias="traceId")] + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + 
frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/dataset_run_with_items.py b/langfuse/api/resources/commons/types/dataset_run_with_items.py index 647d2c553..b234491d5 100644 --- a/langfuse/api/resources/commons/types/dataset_run_with_items.py +++ b/langfuse/api/resources/commons/types/dataset_run_with_items.py @@ -1,48 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from .dataset_run import DatasetRun from .dataset_run_item import DatasetRunItem class DatasetRunWithItems(DatasetRun): - dataset_run_items: typing.List[DatasetRunItem] = pydantic_v1.Field( - alias="datasetRunItems" - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + dataset_run_items: typing_extensions.Annotated[ + typing.List[DatasetRunItem], FieldMetadata(alias="datasetRunItems") + ] + + if 
IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/dataset_status.py b/langfuse/api/resources/commons/types/dataset_status.py index 09eac62fe..f09c011d2 100644 --- a/langfuse/api/resources/commons/types/dataset_status.py +++ b/langfuse/api/resources/commons/types/dataset_status.py @@ -1,21 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class DatasetStatus(str, enum.Enum): - ACTIVE = "ACTIVE" - ARCHIVED = "ARCHIVED" - - def visit( - self, - active: typing.Callable[[], T_Result], - archived: typing.Callable[[], T_Result], - ) -> T_Result: - if self is DatasetStatus.ACTIVE: - return active() - if self is DatasetStatus.ARCHIVED: - return archived() +DatasetStatus = typing.Union[typing.Literal["ACTIVE", "ARCHIVED"], typing.Any] diff --git a/langfuse/api/resources/commons/types/map_value.py b/langfuse/api/resources/commons/types/map_value.py index e1e771a9b..bac2d52bc 100644 --- a/langfuse/api/resources/commons/types/map_value.py +++ b/langfuse/api/resources/commons/types/map_value.py @@ -6,5 +6,6 @@ typing.Optional[str], typing.Optional[int], typing.Optional[bool], + typing.Optional[float], typing.Optional[typing.List[str]], ] diff --git a/langfuse/api/resources/commons/types/model.py b/langfuse/api/resources/commons/types/model.py index ea3922ee9..10025f7ae 100644 --- a/langfuse/api/resources/commons/types/model.py +++ b/langfuse/api/resources/commons/types/model.py @@ -3,110 +3,96 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities 
import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .model_price import ModelPrice from .model_usage_unit import ModelUsageUnit -class Model(pydantic_v1.BaseModel): +class Model(UniversalBaseModel): """ Model definition used for transforming usage into USD cost and/or tokenization. """ id: str - model_name: str = pydantic_v1.Field(alias="modelName") + model_name: typing_extensions.Annotated[str, FieldMetadata(alias="modelName")] = ( + pydantic.Field() + ) """ Name of the model definition. If multiple with the same name exist, they are applied in the following order: (1) custom over built-in, (2) newest according to startTime where model.startTime str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/model_price.py b/langfuse/api/resources/commons/types/model_price.py index 8882004e7..fdd39acbc 100644 --- a/langfuse/api/resources/commons/types/model_price.py +++ 
b/langfuse/api/resources/commons/types/model_price.py @@ -1,42 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPrice(pydantic_v1.BaseModel): +class ModelPrice(UniversalBaseModel): price: float - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/model_usage_unit.py b/langfuse/api/resources/commons/types/model_usage_unit.py index 35253f92e..6705b47e3 100644 --- a/langfuse/api/resources/commons/types/model_usage_unit.py +++ b/langfuse/api/resources/commons/types/model_usage_unit.py @@ -1,41 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class ModelUsageUnit(str, enum.Enum): - """ - Unit of usage in Langfuse - """ - - CHARACTERS = "CHARACTERS" - TOKENS = "TOKENS" - MILLISECONDS = "MILLISECONDS" - SECONDS = "SECONDS" - IMAGES = "IMAGES" - REQUESTS = "REQUESTS" - - def visit( - self, - characters: typing.Callable[[], T_Result], - tokens: typing.Callable[[], T_Result], - milliseconds: typing.Callable[[], T_Result], - seconds: typing.Callable[[], T_Result], - images: typing.Callable[[], T_Result], - requests: typing.Callable[[], T_Result], - ) -> T_Result: - if self is ModelUsageUnit.CHARACTERS: - return characters() - if self is ModelUsageUnit.TOKENS: - return tokens() - if self is ModelUsageUnit.MILLISECONDS: - return milliseconds() - if self is ModelUsageUnit.SECONDS: - return seconds() - if self is ModelUsageUnit.IMAGES: - return images() - if self is ModelUsageUnit.REQUESTS: - return requests() +ModelUsageUnit = typing.Union[ + typing.Literal[ + "CHARACTERS", "TOKENS", "MILLISECONDS", "SECONDS", "IMAGES", "REQUESTS" + ], + typing.Any, +] diff --git a/langfuse/api/resources/commons/types/numeric_score.py b/langfuse/api/resources/commons/types/numeric_score.py index d7f860cd5..977909246 100644 --- a/langfuse/api/resources/commons/types/numeric_score.py +++ b/langfuse/api/resources/commons/types/numeric_score.py @@ -1,48 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_score import BaseScore class NumericScore(BaseScore): - value: float = pydantic_v1.Field() + value: float = pydantic.Field() """ The numeric value of the score """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/numeric_score_v_1.py b/langfuse/api/resources/commons/types/numeric_score_v_1.py index 773d84b46..739b8e1af 100644 --- a/langfuse/api/resources/commons/types/numeric_score_v_1.py +++ b/langfuse/api/resources/commons/types/numeric_score_v_1.py @@ -1,48 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_score_v_1 import BaseScoreV1 class NumericScoreV1(BaseScoreV1): - value: float = pydantic_v1.Field() + value: float = pydantic.Field() """ The numeric value of the score """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/observation.py b/langfuse/api/resources/commons/types/observation.py index b821476f9..d0c02c477 100644 --- a/langfuse/api/resources/commons/types/observation.py +++ b/langfuse/api/resources/commons/types/observation.py @@ -3,162 +3,149 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import 
pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .map_value import MapValue from .observation_level import ObservationLevel from .usage import Usage -class Observation(pydantic_v1.BaseModel): - id: str = pydantic_v1.Field() +class Observation(UniversalBaseModel): + id: str = pydantic.Field() """ The unique identifier of the observation """ - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = pydantic.Field(default=None) """ The trace ID associated with the observation """ - type: str = pydantic_v1.Field() + type: str = pydantic.Field() """ The type of the observation """ - name: typing.Optional[str] = pydantic_v1.Field(default=None) + name: typing.Optional[str] = pydantic.Field(default=None) """ The name of the observation """ - start_time: dt.datetime = pydantic_v1.Field(alias="startTime") + start_time: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="startTime") + ] = pydantic.Field() """ The start time of the observation """ - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="endTime", default=None - ) + end_time: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="endTime") + ] = pydantic.Field(default=None) """ The end time of the observation. 
""" - completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="completionStartTime", default=None - ) + completion_start_time: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="completionStartTime") + ] = pydantic.Field(default=None) """ The completion start time of the observation """ - model: typing.Optional[str] = pydantic_v1.Field(default=None) + model: typing.Optional[str] = pydantic.Field(default=None) """ The model used for the observation """ - model_parameters: typing.Optional[typing.Dict[str, MapValue]] = pydantic_v1.Field( - alias="modelParameters", default=None - ) + model_parameters: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, MapValue]], + FieldMetadata(alias="modelParameters"), + ] = pydantic.Field(default=None) """ The parameters of the model used for the observation """ - input: typing.Optional[typing.Any] = pydantic_v1.Field(default=None) + input: typing.Optional[typing.Any] = pydantic.Field(default=None) """ The input data of the observation """ - version: typing.Optional[str] = pydantic_v1.Field(default=None) + version: typing.Optional[str] = pydantic.Field(default=None) """ The version of the observation """ - metadata: typing.Optional[typing.Any] = pydantic_v1.Field(default=None) + metadata: typing.Optional[typing.Any] = pydantic.Field(default=None) """ Additional metadata of the observation """ - output: typing.Optional[typing.Any] = pydantic_v1.Field(default=None) + output: typing.Optional[typing.Any] = pydantic.Field(default=None) """ The output data of the observation """ - usage: typing.Optional[Usage] = pydantic_v1.Field(default=None) + usage: typing.Optional[Usage] = pydantic.Field(default=None) """ (Deprecated. Use usageDetails and costDetails instead.) 
The usage data of the observation """ - level: ObservationLevel = pydantic_v1.Field() + level: ObservationLevel = pydantic.Field() """ The level of the observation """ - status_message: typing.Optional[str] = pydantic_v1.Field( - alias="statusMessage", default=None - ) + status_message: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="statusMessage") + ] = pydantic.Field(default=None) """ The status message of the observation """ - parent_observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="parentObservationId", default=None - ) + parent_observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="parentObservationId") + ] = pydantic.Field(default=None) """ The parent observation ID """ - prompt_id: typing.Optional[str] = pydantic_v1.Field(alias="promptId", default=None) + prompt_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="promptId") + ] = pydantic.Field(default=None) """ The prompt ID associated with the observation """ - usage_details: typing.Optional[typing.Dict[str, int]] = pydantic_v1.Field( - alias="usageDetails", default=None - ) + usage_details: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, int]], FieldMetadata(alias="usageDetails") + ] = pydantic.Field(default=None) """ The usage details of the observation. Key is the name of the usage metric, value is the number of units consumed. The total key is the sum of all (non-total) usage metrics or the total value ingested. """ - cost_details: typing.Optional[typing.Dict[str, float]] = pydantic_v1.Field( - alias="costDetails", default=None - ) + cost_details: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, float]], FieldMetadata(alias="costDetails") + ] = pydantic.Field(default=None) """ The cost details of the observation. Key is the name of the cost metric, value is the cost in USD. The total key is the sum of all (non-total) cost metrics or the total value ingested. 
""" - environment: typing.Optional[str] = pydantic_v1.Field(default=None) + environment: typing.Optional[str] = pydantic.Field(default=None) """ The environment from which this observation originated. Can be any lowercase alphanumeric string with hyphens and underscores that does not start with 'langfuse'. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/observation_level.py b/langfuse/api/resources/commons/types/observation_level.py index c33e87b59..8a42779ca 100644 --- a/langfuse/api/resources/commons/types/observation_level.py +++ b/langfuse/api/resources/commons/types/observation_level.py @@ -1,29 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class ObservationLevel(str, enum.Enum): - DEBUG = "DEBUG" - DEFAULT = "DEFAULT" - WARNING = "WARNING" - ERROR = "ERROR" - - def visit( - self, - debug: typing.Callable[[], T_Result], - default: typing.Callable[[], T_Result], - warning: typing.Callable[[], T_Result], - error: typing.Callable[[], T_Result], - ) -> T_Result: - if self is ObservationLevel.DEBUG: - return debug() - if self is ObservationLevel.DEFAULT: - return default() - if self is ObservationLevel.WARNING: - return warning() - if self is ObservationLevel.ERROR: - return error() +ObservationLevel = typing.Union[ + typing.Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"], typing.Any +] diff --git a/langfuse/api/resources/commons/types/observations_view.py b/langfuse/api/resources/commons/types/observations_view.py index e011fa32b..002208027 100644 --- a/langfuse/api/resources/commons/types/observations_view.py +++ b/langfuse/api/resources/commons/types/observations_view.py @@ -1,116 +1,97 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from .observation import Observation class ObservationsView(Observation): - prompt_name: typing.Optional[str] = pydantic_v1.Field( - alias="promptName", default=None - ) + prompt_name: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="promptName") + ] = pydantic.Field(default=None) """ The name of the prompt associated with the observation """ - prompt_version: typing.Optional[int] = pydantic_v1.Field( - alias="promptVersion", default=None - ) + prompt_version: typing_extensions.Annotated[ + typing.Optional[int], FieldMetadata(alias="promptVersion") + ] = pydantic.Field(default=None) """ The version of the prompt associated with the observation """ - model_id: typing.Optional[str] = pydantic_v1.Field(alias="modelId", default=None) + model_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="modelId") + ] = pydantic.Field(default=None) """ The unique identifier of the model """ - input_price: typing.Optional[float] = pydantic_v1.Field( - alias="inputPrice", default=None - ) + input_price: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="inputPrice") + ] = pydantic.Field(default=None) """ The price of the input in USD """ - output_price: typing.Optional[float] = pydantic_v1.Field( - alias="outputPrice", default=None - ) + output_price: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="outputPrice") + ] = pydantic.Field(default=None) """ The price of the output in USD. 
""" - total_price: typing.Optional[float] = pydantic_v1.Field( - alias="totalPrice", default=None - ) + total_price: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="totalPrice") + ] = pydantic.Field(default=None) """ The total price in USD. """ - calculated_input_cost: typing.Optional[float] = pydantic_v1.Field( - alias="calculatedInputCost", default=None - ) + calculated_input_cost: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="calculatedInputCost") + ] = pydantic.Field(default=None) """ (Deprecated. Use usageDetails and costDetails instead.) The calculated cost of the input in USD """ - calculated_output_cost: typing.Optional[float] = pydantic_v1.Field( - alias="calculatedOutputCost", default=None - ) + calculated_output_cost: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="calculatedOutputCost") + ] = pydantic.Field(default=None) """ (Deprecated. Use usageDetails and costDetails instead.) The calculated cost of the output in USD """ - calculated_total_cost: typing.Optional[float] = pydantic_v1.Field( - alias="calculatedTotalCost", default=None - ) + calculated_total_cost: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="calculatedTotalCost") + ] = pydantic.Field(default=None) """ (Deprecated. Use usageDetails and costDetails instead.) The calculated total cost in USD """ - latency: typing.Optional[float] = pydantic_v1.Field(default=None) + latency: typing.Optional[float] = pydantic.Field(default=None) """ The latency in seconds. 
""" - time_to_first_token: typing.Optional[float] = pydantic_v1.Field( - alias="timeToFirstToken", default=None - ) + time_to_first_token: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="timeToFirstToken") + ] = pydantic.Field(default=None) """ The time to the first token in seconds """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/score.py b/langfuse/api/resources/commons/types/score.py index f0b866067..040ab6cbb 100644 --- a/langfuse/api/resources/commons/types/score.py +++ b/langfuse/api/resources/commons/types/score.py @@ -5,203 +5,172 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from 
....core.serialization import FieldMetadata from .score_source import ScoreSource -class Score_Numeric(pydantic_v1.BaseModel): +class Score_Numeric(UniversalBaseModel): + data_type: typing_extensions.Annotated[ + typing.Literal["NUMERIC"], FieldMetadata(alias="dataType") + ] = "NUMERIC" value: float id: str - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) - session_id: typing.Optional[str] = pydantic_v1.Field( - alias="sessionId", default=None - ) - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - dataset_run_id: typing.Optional[str] = pydantic_v1.Field( - alias="datasetRunId", default=None - ) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = None + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + dataset_run_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="datasetRunId") + ] = None name: str source: ScoreSource timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + author_user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="authorUserId") + ] = None comment: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) + 
config_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="configId") + ] = None + queue_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="queueId") + ] = None environment: typing.Optional[str] = None - data_type: typing.Literal["NUMERIC"] = pydantic_v1.Field( - alias="dataType", default="NUMERIC" - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class Score_Categorical(pydantic_v1.BaseModel): + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class Score_Categorical(UniversalBaseModel): + data_type: typing_extensions.Annotated[ + typing.Literal["CATEGORICAL"], FieldMetadata(alias="dataType") + ] = "CATEGORICAL" value: float - string_value: str = pydantic_v1.Field(alias="stringValue") + string_value: typing_extensions.Annotated[str, FieldMetadata(alias="stringValue")] id: str - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) - session_id: typing.Optional[str] = pydantic_v1.Field( 
- alias="sessionId", default=None - ) - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - dataset_run_id: typing.Optional[str] = pydantic_v1.Field( - alias="datasetRunId", default=None - ) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = None + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + dataset_run_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="datasetRunId") + ] = None name: str source: ScoreSource timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + author_user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="authorUserId") + ] = None comment: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) + config_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="configId") + ] = None + queue_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="queueId") + ] = None environment: typing.Optional[str] = None - data_type: typing.Literal["CATEGORICAL"] = pydantic_v1.Field( - alias="dataType", default="CATEGORICAL" - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - 
"by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class Score_Boolean(pydantic_v1.BaseModel): + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class Score_Boolean(UniversalBaseModel): + data_type: typing_extensions.Annotated[ + typing.Literal["BOOLEAN"], FieldMetadata(alias="dataType") + ] = "BOOLEAN" value: float - string_value: str = pydantic_v1.Field(alias="stringValue") + string_value: typing_extensions.Annotated[str, FieldMetadata(alias="stringValue")] id: str - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) - session_id: typing.Optional[str] = pydantic_v1.Field( - alias="sessionId", default=None - ) - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - dataset_run_id: typing.Optional[str] = pydantic_v1.Field( - alias="datasetRunId", default=None - ) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = None + 
observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + dataset_run_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="datasetRunId") + ] = None name: str source: ScoreSource timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + author_user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="authorUserId") + ] = None comment: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) + config_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="configId") + ] = None + queue_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="queueId") + ] = None environment: typing.Optional[str] = None - data_type: typing.Literal["BOOLEAN"] = pydantic_v1.Field( - alias="dataType", default="BOOLEAN" - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - 
super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -Score = typing.Union[Score_Numeric, Score_Categorical, Score_Boolean] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +Score = typing_extensions.Annotated[ + typing.Union[Score_Numeric, Score_Categorical, Score_Boolean], + pydantic.Field(discriminator="dataType"), +] diff --git a/langfuse/api/resources/commons/types/score_config.py b/langfuse/api/resources/commons/types/score_config.py index 4a7b30e0e..2d43c48c8 100644 --- a/langfuse/api/resources/commons/types/score_config.py +++ b/langfuse/api/resources/commons/types/score_config.py @@ -3,43 +3,53 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .config_category import ConfigCategory from .score_data_type import ScoreDataType -class ScoreConfig(pydantic_v1.BaseModel): +class ScoreConfig(UniversalBaseModel): """ Configuration for a score """ id: str name: str - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - project_id: str = pydantic_v1.Field(alias="projectId") - data_type: ScoreDataType = pydantic_v1.Field(alias="dataType") - is_archived: bool = pydantic_v1.Field(alias="isArchived") + created_at: 
typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + project_id: typing_extensions.Annotated[str, FieldMetadata(alias="projectId")] + data_type: typing_extensions.Annotated[ + ScoreDataType, FieldMetadata(alias="dataType") + ] + is_archived: typing_extensions.Annotated[ + bool, FieldMetadata(alias="isArchived") + ] = pydantic.Field() """ Whether the score config is archived. Defaults to false """ - min_value: typing.Optional[float] = pydantic_v1.Field( - alias="minValue", default=None - ) + min_value: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="minValue") + ] = pydantic.Field(default=None) """ Sets minimum value for numerical scores. If not set, the minimum value defaults to -∞ """ - max_value: typing.Optional[float] = pydantic_v1.Field( - alias="maxValue", default=None - ) + max_value: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="maxValue") + ] = pydantic.Field(default=None) """ Sets maximum value for numerical scores. 
If not set, the maximum value defaults to +∞ """ - categories: typing.Optional[typing.List[ConfigCategory]] = pydantic_v1.Field( + categories: typing.Optional[typing.List[ConfigCategory]] = pydantic.Field( default=None ) """ @@ -48,35 +58,13 @@ class ScoreConfig(pydantic_v1.BaseModel): description: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/score_data_type.py b/langfuse/api/resources/commons/types/score_data_type.py index c2eed12cd..d3be48c06 100644 --- a/langfuse/api/resources/commons/types/score_data_type.py +++ b/langfuse/api/resources/commons/types/score_data_type.py @@ -1,25 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class ScoreDataType(str, enum.Enum): - NUMERIC = "NUMERIC" - BOOLEAN = "BOOLEAN" - CATEGORICAL = "CATEGORICAL" - - def visit( - self, - numeric: typing.Callable[[], T_Result], - boolean: typing.Callable[[], T_Result], - categorical: typing.Callable[[], T_Result], - ) -> T_Result: - if self is ScoreDataType.NUMERIC: - return numeric() - if self is ScoreDataType.BOOLEAN: - return boolean() - if self is ScoreDataType.CATEGORICAL: - return categorical() +ScoreDataType = typing.Union[ + typing.Literal["NUMERIC", "BOOLEAN", "CATEGORICAL"], typing.Any +] diff --git a/langfuse/api/resources/commons/types/score_source.py b/langfuse/api/resources/commons/types/score_source.py index 699f078b7..036918272 100644 --- a/langfuse/api/resources/commons/types/score_source.py +++ b/langfuse/api/resources/commons/types/score_source.py @@ -1,25 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class ScoreSource(str, enum.Enum): - ANNOTATION = "ANNOTATION" - API = "API" - EVAL = "EVAL" - - def visit( - self, - annotation: typing.Callable[[], T_Result], - api: typing.Callable[[], T_Result], - eval: typing.Callable[[], T_Result], - ) -> T_Result: - if self is ScoreSource.ANNOTATION: - return annotation() - if self is ScoreSource.API: - return api() - if self is ScoreSource.EVAL: - return eval() +ScoreSource = typing.Union[typing.Literal["ANNOTATION", "API", "EVAL"], typing.Any] diff --git a/langfuse/api/resources/commons/types/score_v_1.py b/langfuse/api/resources/commons/types/score_v_1.py index 191e0d96f..2d9c53771 100644 --- a/langfuse/api/resources/commons/types/score_v_1.py +++ b/langfuse/api/resources/commons/types/score_v_1.py @@ -1,189 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from __future__ import annotations - -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .score_source import ScoreSource - - -class ScoreV1_Numeric(pydantic_v1.BaseModel): - value: float - id: str - trace_id: str = pydantic_v1.Field(alias="traceId") - name: str - source: ScoreSource - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) - comment: typing.Optional[str] = None - metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) - environment: typing.Optional[str] = None - data_type: typing.Literal["NUMERIC"] = pydantic_v1.Field( - alias="dataType", default="NUMERIC" - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = 
{dt.datetime: serialize_datetime} - - -class ScoreV1_Categorical(pydantic_v1.BaseModel): - value: float - string_value: str = pydantic_v1.Field(alias="stringValue") - id: str - trace_id: str = pydantic_v1.Field(alias="traceId") - name: str - source: ScoreSource - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) - comment: typing.Optional[str] = None - metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) - environment: typing.Optional[str] = None - data_type: typing.Literal["CATEGORICAL"] = pydantic_v1.Field( - alias="dataType", default="CATEGORICAL" - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class ScoreV1_Boolean(pydantic_v1.BaseModel): - value: float - string_value: str = 
pydantic_v1.Field(alias="stringValue") - id: str - trace_id: str = pydantic_v1.Field(alias="traceId") - name: str - source: ScoreSource - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) - comment: typing.Optional[str] = None - metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) - environment: typing.Optional[str] = None - data_type: typing.Literal["BOOLEAN"] = pydantic_v1.Field( - alias="dataType", default="BOOLEAN" - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - +from .boolean_score_v_1 import BooleanScoreV1 +from .categorical_score_v_1 import CategoricalScoreV1 +from .numeric_score_v_1 import NumericScoreV1 -ScoreV1 = typing.Union[ScoreV1_Numeric, ScoreV1_Categorical, ScoreV1_Boolean] +ScoreV1 = 
typing.Union[NumericScoreV1, CategoricalScoreV1, BooleanScoreV1] diff --git a/langfuse/api/resources/commons/types/session.py b/langfuse/api/resources/commons/types/session.py index 46a0a6b96..ad582834c 100644 --- a/langfuse/api/resources/commons/types/session.py +++ b/langfuse/api/resources/commons/types/session.py @@ -3,48 +3,30 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class Session(pydantic_v1.BaseModel): +class Session(UniversalBaseModel): id: str - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - project_id: str = pydantic_v1.Field(alias="projectId") - environment: typing.Optional[str] = pydantic_v1.Field(default=None) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + project_id: typing_extensions.Annotated[str, FieldMetadata(alias="projectId")] + environment: typing.Optional[str] = pydantic.Field(default=None) """ The environment from which this session originated. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/session_with_traces.py b/langfuse/api/resources/commons/types/session_with_traces.py index b5465daa9..82fe8ef27 100644 --- a/langfuse/api/resources/commons/types/session_with_traces.py +++ b/langfuse/api/resources/commons/types/session_with_traces.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .session import Session from .trace import Trace @@ -12,35 +11,13 @@ class SessionWithTraces(Session): traces: typing.List[Trace] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/trace.py b/langfuse/api/resources/commons/types/trace.py index d977ed3d7..1085689e6 100644 --- a/langfuse/api/resources/commons/types/trace.py +++ b/langfuse/api/resources/commons/types/trace.py @@ -3,107 +3,89 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import 
IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class Trace(pydantic_v1.BaseModel): - id: str = pydantic_v1.Field() +class Trace(UniversalBaseModel): + id: str = pydantic.Field() """ The unique identifier of a trace """ - timestamp: dt.datetime = pydantic_v1.Field() + timestamp: dt.datetime = pydantic.Field() """ The timestamp when the trace was created """ - name: typing.Optional[str] = pydantic_v1.Field(default=None) + name: typing.Optional[str] = pydantic.Field(default=None) """ The name of the trace """ - input: typing.Optional[typing.Any] = pydantic_v1.Field(default=None) + input: typing.Optional[typing.Any] = pydantic.Field(default=None) """ The input data of the trace. Can be any JSON. """ - output: typing.Optional[typing.Any] = pydantic_v1.Field(default=None) + output: typing.Optional[typing.Any] = pydantic.Field(default=None) """ The output data of the trace. Can be any JSON. """ - session_id: typing.Optional[str] = pydantic_v1.Field( - alias="sessionId", default=None - ) + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = pydantic.Field(default=None) """ The session identifier associated with the trace """ - release: typing.Optional[str] = pydantic_v1.Field(default=None) + release: typing.Optional[str] = pydantic.Field(default=None) """ The release version of the application when the trace was created """ - version: typing.Optional[str] = pydantic_v1.Field(default=None) + version: typing.Optional[str] = pydantic.Field(default=None) """ The version of the trace """ - user_id: typing.Optional[str] = pydantic_v1.Field(alias="userId", default=None) + user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="userId") + ] = pydantic.Field(default=None) """ The user identifier associated with the trace """ - metadata: typing.Optional[typing.Any] = pydantic_v1.Field(default=None) + metadata: typing.Optional[typing.Any] = 
pydantic.Field(default=None) """ The metadata associated with the trace. Can be any JSON. """ - tags: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) + tags: typing.Optional[typing.List[str]] = pydantic.Field(default=None) """ The tags associated with the trace. Can be an array of strings or null. """ - public: typing.Optional[bool] = pydantic_v1.Field(default=None) + public: typing.Optional[bool] = pydantic.Field(default=None) """ Public traces are accessible via url without login """ - environment: typing.Optional[str] = pydantic_v1.Field(default=None) + environment: typing.Optional[str] = pydantic.Field(default=None) """ The environment from which this trace originated. Can be any lowercase alphanumeric string with hyphens and underscores that does not start with 'langfuse'. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/trace_with_details.py 
b/langfuse/api/resources/commons/types/trace_with_details.py index 5ffe6f218..a6296c2f5 100644 --- a/langfuse/api/resources/commons/types/trace_with_details.py +++ b/langfuse/api/resources/commons/types/trace_with_details.py @@ -1,68 +1,51 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from .trace import Trace class TraceWithDetails(Trace): - html_path: str = pydantic_v1.Field(alias="htmlPath") + html_path: typing_extensions.Annotated[str, FieldMetadata(alias="htmlPath")] = ( + pydantic.Field() + ) """ Path of trace in Langfuse UI """ - latency: float = pydantic_v1.Field() + latency: float = pydantic.Field() """ Latency of trace in seconds """ - total_cost: float = pydantic_v1.Field(alias="totalCost") + total_cost: typing_extensions.Annotated[float, FieldMetadata(alias="totalCost")] = ( + pydantic.Field() + ) """ Cost of trace in USD """ - observations: typing.List[str] = pydantic_v1.Field() + observations: typing.List[str] = pydantic.Field() """ List of observation ids """ - scores: typing.List[str] = pydantic_v1.Field() + scores: typing.List[str] = pydantic.Field() """ List of score ids """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - 
kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/trace_with_full_details.py b/langfuse/api/resources/commons/types/trace_with_full_details.py index 2c6a99402..dbe76e35b 100644 --- a/langfuse/api/resources/commons/types/trace_with_full_details.py +++ b/langfuse/api/resources/commons/types/trace_with_full_details.py @@ -1,70 +1,53 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from .observations_view import ObservationsView from .score_v_1 import ScoreV1 from .trace import Trace class TraceWithFullDetails(Trace): - html_path: str = pydantic_v1.Field(alias="htmlPath") + html_path: typing_extensions.Annotated[str, FieldMetadata(alias="htmlPath")] = ( + pydantic.Field() + ) """ Path of trace in Langfuse UI """ - latency: float = pydantic_v1.Field() + latency: float = pydantic.Field() """ Latency of trace in seconds """ - total_cost: float = pydantic_v1.Field(alias="totalCost") + total_cost: typing_extensions.Annotated[float, FieldMetadata(alias="totalCost")] = ( + pydantic.Field() + ) """ Cost of trace in USD """ - observations: typing.List[ObservationsView] = pydantic_v1.Field() + 
observations: typing.List[ObservationsView] = pydantic.Field() """ List of observations """ - scores: typing.List[ScoreV1] = pydantic_v1.Field() + scores: typing.List[ScoreV1] = pydantic.Field() """ List of scores """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/commons/types/usage.py b/langfuse/api/resources/commons/types/usage.py index c38330494..341536254 100644 --- a/langfuse/api/resources/commons/types/usage.py +++ b/langfuse/api/resources/commons/types/usage.py @@ -1,84 +1,63 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .model_usage_unit import ModelUsageUnit -class Usage(pydantic_v1.BaseModel): +class Usage(UniversalBaseModel): """ (Deprecated. Use usageDetails and costDetails instead.) Standard interface for usage and cost """ - input: typing.Optional[int] = pydantic_v1.Field(default=None) + input: typing.Optional[int] = pydantic.Field(default=None) """ Number of input units (e.g. tokens) """ - output: typing.Optional[int] = pydantic_v1.Field(default=None) + output: typing.Optional[int] = pydantic.Field(default=None) """ Number of output units (e.g. tokens) """ - total: typing.Optional[int] = pydantic_v1.Field(default=None) + total: typing.Optional[int] = pydantic.Field(default=None) """ Defaults to input+output if not set """ unit: typing.Optional[ModelUsageUnit] = None - input_cost: typing.Optional[float] = pydantic_v1.Field( - alias="inputCost", default=None - ) + input_cost: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="inputCost") + ] = pydantic.Field(default=None) """ USD input cost """ - output_cost: typing.Optional[float] = pydantic_v1.Field( - alias="outputCost", default=None - ) + output_cost: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="outputCost") + ] = pydantic.Field(default=None) """ USD output cost """ - total_cost: typing.Optional[float] = pydantic_v1.Field( - alias="totalCost", default=None - ) + total_cost: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="totalCost") + ] = pydantic.Field(default=None) """ USD total cost, defaults to input+output """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { 
- "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/dataset_items/__init__.py b/langfuse/api/resources/dataset_items/__init__.py index 06d2ae527..4d009d228 100644 --- a/langfuse/api/resources/dataset_items/__init__.py +++ b/langfuse/api/resources/dataset_items/__init__.py @@ -1,10 +1,49 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import ( - CreateDatasetItemRequest, - DeleteDatasetItemResponse, - PaginatedDatasetItems, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + CreateDatasetItemRequest, + DeleteDatasetItemResponse, + PaginatedDatasetItems, + ) +_dynamic_imports: typing.Dict[str, str] = { + "CreateDatasetItemRequest": ".types", + "DeleteDatasetItemResponse": ".types", + "PaginatedDatasetItems": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "CreateDatasetItemRequest", diff --git a/langfuse/api/resources/dataset_items/client.py b/langfuse/api/resources/dataset_items/client.py index 8ece3a790..dc284ff1d 100644 --- a/langfuse/api/resources/dataset_items/client.py +++ b/langfuse/api/resources/dataset_items/client.py @@ -1,19 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..commons.types.dataset_item import DatasetItem +from .raw_client import AsyncRawDatasetItemsClient, RawDatasetItemsClient from .types.create_dataset_item_request import CreateDatasetItemRequest from .types.delete_dataset_item_response import DeleteDatasetItemResponse from .types.paginated_dataset_items import PaginatedDatasetItems @@ -24,7 +16,18 @@ class DatasetItemsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawDatasetItemsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawDatasetItemsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawDatasetItemsClient + """ + return self._raw_client def create( self, @@ -48,8 +51,8 @@ def create( Examples -------- - from langfuse import CreateDatasetItemRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.dataset_items import CreateDatasetItemRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -65,38 +68,10 @@ def create( ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/dataset-items", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -117,7 +92,7 @@ def get( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( 
x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -131,36 +106,8 @@ def get( id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/dataset-items/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def list( self, @@ -198,7 +145,7 @@ def list( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -210,43 +157,15 @@ def list( ) client.dataset_items.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/dataset-items", - method="GET", - params={ - "datasetName": dataset_name, - "sourceTraceId": source_trace_id, - "sourceObservationId": source_observation_id, - "page": page, - "limit": limit, - }, + _response = self._raw_client.list( + dataset_name=dataset_name, + 
source_trace_id=source_trace_id, + source_observation_id=source_observation_id, + page=page, + limit=limit, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDatasetItems, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -267,7 +186,7 @@ def delete( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -281,43 +200,24 @@ def delete( id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/dataset-items/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - DeleteDatasetItemResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise 
UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data class AsyncDatasetItemsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawDatasetItemsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawDatasetItemsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawDatasetItemsClient + """ + return self._raw_client async def create( self, @@ -343,8 +243,8 @@ async def create( -------- import asyncio - from langfuse import CreateDatasetItemRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.dataset_items import CreateDatasetItemRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -366,38 +266,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/dataset-items", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -420,7 +292,7 @@ async def get( -------- import asyncio - from 
langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -440,36 +312,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/dataset-items/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def list( self, @@ -509,7 +353,7 @@ async def list( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -527,43 +371,15 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/dataset-items", - method="GET", - params={ - "datasetName": dataset_name, - "sourceTraceId": 
source_trace_id, - "sourceObservationId": source_observation_id, - "page": page, - "limit": limit, - }, + _response = await self._raw_client.list( + dataset_name=dataset_name, + source_trace_id=source_trace_id, + source_observation_id=source_observation_id, + page=page, + limit=limit, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDatasetItems, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -586,7 +402,7 @@ async def delete( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -606,35 +422,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/dataset-items/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as( - DeleteDatasetItemResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data diff --git a/langfuse/api/resources/dataset_items/raw_client.py b/langfuse/api/resources/dataset_items/raw_client.py new file mode 100644 index 000000000..ce4759e5b --- /dev/null +++ b/langfuse/api/resources/dataset_items/raw_client.py @@ -0,0 +1,896 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..commons.types.dataset_item import DatasetItem +from .types.create_dataset_item_request import CreateDatasetItemRequest +from .types.delete_dataset_item_response import DeleteDatasetItemResponse +from .types.paginated_dataset_items import PaginatedDatasetItems + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawDatasetItemsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def create( + self, + *, + request: CreateDatasetItemRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DatasetItem]: + """ + Create a dataset item + + Parameters + ---------- + request : CreateDatasetItemRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[DatasetItem] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/dataset-items", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateDatasetItemRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DatasetItem, + parse_obj_as( + type_=DatasetItem, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get( + self, id: 
str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[DatasetItem]: + """ + Get a dataset item + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DatasetItem] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/dataset-items/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DatasetItem, + parse_obj_as( + type_=DatasetItem, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise 
ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def list( + self, + *, + dataset_name: typing.Optional[str] = None, + source_trace_id: typing.Optional[str] = None, + source_observation_id: typing.Optional[str] = None, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PaginatedDatasetItems]: + """ + Get dataset items + + Parameters + ---------- + dataset_name : typing.Optional[str] + + source_trace_id : typing.Optional[str] + + source_observation_id : typing.Optional[str] + + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PaginatedDatasetItems] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/dataset-items", + method="GET", + params={ + "datasetName": dataset_name, + "sourceTraceId": source_trace_id, + "sourceObservationId": source_observation_id, + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedDatasetItems, + parse_obj_as( + type_=PaginatedDatasetItems, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + 
headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[DeleteDatasetItemResponse]: + """ + Delete a dataset item and all its run items. This action is irreversible. + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[DeleteDatasetItemResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/dataset-items/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteDatasetItemResponse, + parse_obj_as( + type_=DeleteDatasetItemResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawDatasetItemsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): 
+ self._client_wrapper = client_wrapper + + async def create( + self, + *, + request: CreateDatasetItemRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DatasetItem]: + """ + Create a dataset item + + Parameters + ---------- + request : CreateDatasetItemRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DatasetItem] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/dataset-items", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateDatasetItemRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DatasetItem, + parse_obj_as( + type_=DatasetItem, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + 
parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[DatasetItem]: + """ + Get a dataset item + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DatasetItem] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/dataset-items/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DatasetItem, + parse_obj_as( + type_=DatasetItem, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def list( + self, + *, + dataset_name: typing.Optional[str] = None, + source_trace_id: typing.Optional[str] = None, + source_observation_id: typing.Optional[str] = None, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PaginatedDatasetItems]: + """ + Get dataset items + + Parameters + ---------- + dataset_name : typing.Optional[str] + + source_trace_id : typing.Optional[str] + + source_observation_id : typing.Optional[str] + + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[PaginatedDatasetItems] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/dataset-items", + method="GET", + params={ + "datasetName": dataset_name, + "sourceTraceId": source_trace_id, + "sourceObservationId": source_observation_id, + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedDatasetItems, + parse_obj_as( + type_=PaginatedDatasetItems, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response_json, + ) + + async def delete( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[DeleteDatasetItemResponse]: + """ + Delete a dataset item and all its run items. This action is irreversible. + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeleteDatasetItemResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/dataset-items/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteDatasetItemResponse, + parse_obj_as( + type_=DeleteDatasetItemResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/dataset_items/types/__init__.py b/langfuse/api/resources/dataset_items/types/__init__.py index 214adce0e..c7ce59bf4 100644 --- a/langfuse/api/resources/dataset_items/types/__init__.py +++ b/langfuse/api/resources/dataset_items/types/__init__.py @@ -1,8 +1,47 @@ # This file was auto-generated by Fern from our API Definition. -from .create_dataset_item_request import CreateDatasetItemRequest -from .delete_dataset_item_response import DeleteDatasetItemResponse -from .paginated_dataset_items import PaginatedDatasetItems +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .create_dataset_item_request import CreateDatasetItemRequest + from .delete_dataset_item_response import DeleteDatasetItemResponse + from .paginated_dataset_items import PaginatedDatasetItems +_dynamic_imports: typing.Dict[str, str] = { + "CreateDatasetItemRequest": ".create_dataset_item_request", + "DeleteDatasetItemResponse": ".delete_dataset_item_response", + "PaginatedDatasetItems": ".paginated_dataset_items", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get 
{attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "CreateDatasetItemRequest", diff --git a/langfuse/api/resources/dataset_items/types/create_dataset_item_request.py b/langfuse/api/resources/dataset_items/types/create_dataset_item_request.py index 111f6819a..bec2a0803 100644 --- a/langfuse/api/resources/dataset_items/types/create_dataset_item_request.py +++ b/langfuse/api/resources/dataset_items/types/create_dataset_item_request.py @@ -1,65 +1,44 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from ...commons.types.dataset_status import DatasetStatus -class CreateDatasetItemRequest(pydantic_v1.BaseModel): - dataset_name: str = pydantic_v1.Field(alias="datasetName") +class CreateDatasetItemRequest(UniversalBaseModel): + dataset_name: typing_extensions.Annotated[str, FieldMetadata(alias="datasetName")] input: typing.Optional[typing.Any] = None - expected_output: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="expectedOutput", default=None - ) + expected_output: typing_extensions.Annotated[ + typing.Optional[typing.Any], FieldMetadata(alias="expectedOutput") + ] = None metadata: typing.Optional[typing.Any] = None - source_trace_id: typing.Optional[str] = pydantic_v1.Field( - alias="sourceTraceId", default=None - ) - source_observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="sourceObservationId", default=None - ) - id: typing.Optional[str] = pydantic_v1.Field(default=None) + source_trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sourceTraceId") + ] = 
None + source_observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sourceObservationId") + ] = None + id: typing.Optional[str] = pydantic.Field(default=None) """ Dataset items are upserted on their id. Id needs to be unique (project-level) and cannot be reused across datasets. """ - status: typing.Optional[DatasetStatus] = pydantic_v1.Field(default=None) + status: typing.Optional[DatasetStatus] = pydantic.Field(default=None) """ Defaults to ACTIVE for newly created items """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/dataset_items/types/delete_dataset_item_response.py b/langfuse/api/resources/dataset_items/types/delete_dataset_item_response.py index 4d700ff75..8904ae982 100644 --- a/langfuse/api/resources/dataset_items/types/delete_dataset_item_response.py +++ b/langfuse/api/resources/dataset_items/types/delete_dataset_item_response.py @@ -1,45 +1,24 @@ # 
This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DeleteDatasetItemResponse(pydantic_v1.BaseModel): - message: str = pydantic_v1.Field() +class DeleteDatasetItemResponse(UniversalBaseModel): + message: str = pydantic.Field() """ Success message after deletion """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/dataset_items/types/paginated_dataset_items.py b/langfuse/api/resources/dataset_items/types/paginated_dataset_items.py index 8592ba80f..aaeee1373 100644 --- a/langfuse/api/resources/dataset_items/types/paginated_dataset_items.py +++ b/langfuse/api/resources/dataset_items/types/paginated_dataset_items.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API 
Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.dataset_item import DatasetItem from ...utils.resources.pagination.types.meta_response import MetaResponse -class PaginatedDatasetItems(pydantic_v1.BaseModel): +class PaginatedDatasetItems(UniversalBaseModel): data: typing.List[DatasetItem] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/dataset_run_items/__init__.py b/langfuse/api/resources/dataset_run_items/__init__.py index d522a3129..9ff4e097e 100644 --- a/langfuse/api/resources/dataset_run_items/__init__.py +++ b/langfuse/api/resources/dataset_run_items/__init__.py @@ -1,5 +1,43 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import CreateDatasetRunItemRequest, PaginatedDatasetRunItems +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import CreateDatasetRunItemRequest, PaginatedDatasetRunItems +_dynamic_imports: typing.Dict[str, str] = { + "CreateDatasetRunItemRequest": ".types", + "PaginatedDatasetRunItems": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["CreateDatasetRunItemRequest", "PaginatedDatasetRunItems"] diff --git a/langfuse/api/resources/dataset_run_items/client.py b/langfuse/api/resources/dataset_run_items/client.py index 3664fde96..2ae6cb7eb 100644 --- a/langfuse/api/resources/dataset_run_items/client.py +++ b/langfuse/api/resources/dataset_run_items/client.py @@ -1,18 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..commons.types.dataset_run_item import DatasetRunItem +from .raw_client import AsyncRawDatasetRunItemsClient, RawDatasetRunItemsClient from .types.create_dataset_run_item_request import CreateDatasetRunItemRequest from .types.paginated_dataset_run_items import PaginatedDatasetRunItems @@ -22,7 +15,18 @@ class DatasetRunItemsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawDatasetRunItemsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawDatasetRunItemsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawDatasetRunItemsClient + """ + return self._raw_client def create( self, @@ -46,8 +50,8 @@ def create( Examples -------- - from langfuse import CreateDatasetRunItemRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.dataset_run_items import CreateDatasetRunItemRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -64,38 +68,10 @@ def create( ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/dataset-run-items", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetRunItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def list( self, @@ -130,7 +106,7 @@ def list( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -145,49 +121,30 
@@ def list( run_name="runName", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/dataset-run-items", - method="GET", - params={ - "datasetId": dataset_id, - "runName": run_name, - "page": page, - "limit": limit, - }, + _response = self._raw_client.list( + dataset_id=dataset_id, + run_name=run_name, + page=page, + limit=limit, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - PaginatedDatasetRunItems, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncDatasetRunItemsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawDatasetRunItemsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawDatasetRunItemsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawDatasetRunItemsClient + """ + return self._raw_client async def create( self, @@ -213,8 +170,8 @@ async def create( -------- import asyncio - from langfuse import CreateDatasetRunItemRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.dataset_run_items import CreateDatasetRunItemRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -237,38 +194,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/dataset-run-items", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetRunItem, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def list( self, @@ -305,7 +234,7 @@ async def list( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse 
import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -326,41 +255,11 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/dataset-run-items", - method="GET", - params={ - "datasetId": dataset_id, - "runName": run_name, - "page": page, - "limit": limit, - }, + _response = await self._raw_client.list( + dataset_id=dataset_id, + run_name=run_name, + page=page, + limit=limit, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - PaginatedDatasetRunItems, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/dataset_run_items/raw_client.py b/langfuse/api/resources/dataset_run_items/raw_client.py new file mode 100644 index 000000000..85c51d70e --- /dev/null +++ b/langfuse/api/resources/dataset_run_items/raw_client.py @@ -0,0 +1,490 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..commons.types.dataset_run_item import DatasetRunItem +from .types.create_dataset_run_item_request import CreateDatasetRunItemRequest +from .types.paginated_dataset_run_items import PaginatedDatasetRunItems + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawDatasetRunItemsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def create( + self, + *, + request: CreateDatasetRunItemRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DatasetRunItem]: + """ + Create a dataset run item + + Parameters + ---------- + request : CreateDatasetRunItemRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[DatasetRunItem] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/dataset-run-items", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=CreateDatasetRunItemRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DatasetRunItem, + parse_obj_as( + type_=DatasetRunItem, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + 
def list( + self, + *, + dataset_id: str, + run_name: str, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PaginatedDatasetRunItems]: + """ + List dataset run items + + Parameters + ---------- + dataset_id : str + + run_name : str + + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PaginatedDatasetRunItems] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/dataset-run-items", + method="GET", + params={ + "datasetId": dataset_id, + "runName": run_name, + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedDatasetRunItems, + parse_obj_as( + type_=PaginatedDatasetRunItems, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + 
) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawDatasetRunItemsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def create( + self, + *, + request: CreateDatasetRunItemRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DatasetRunItem]: + """ + Create a dataset run item + + Parameters + ---------- + request : CreateDatasetRunItemRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[DatasetRunItem] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/dataset-run-items", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=CreateDatasetRunItemRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DatasetRunItem, + parse_obj_as( + type_=DatasetRunItem, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + 
body=_response_json, + ) + + async def list( + self, + *, + dataset_id: str, + run_name: str, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PaginatedDatasetRunItems]: + """ + List dataset run items + + Parameters + ---------- + dataset_id : str + + run_name : str + + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PaginatedDatasetRunItems] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/dataset-run-items", + method="GET", + params={ + "datasetId": dataset_id, + "runName": run_name, + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedDatasetRunItems, + parse_obj_as( + type_=PaginatedDatasetRunItems, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, 
# type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/dataset_run_items/types/__init__.py b/langfuse/api/resources/dataset_run_items/types/__init__.py index e48e72c27..b520924c0 100644 --- a/langfuse/api/resources/dataset_run_items/types/__init__.py +++ b/langfuse/api/resources/dataset_run_items/types/__init__.py @@ -1,6 +1,44 @@ # This file was auto-generated by Fern from our API Definition. -from .create_dataset_run_item_request import CreateDatasetRunItemRequest -from .paginated_dataset_run_items import PaginatedDatasetRunItems +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .create_dataset_run_item_request import CreateDatasetRunItemRequest + from .paginated_dataset_run_items import PaginatedDatasetRunItems +_dynamic_imports: typing.Dict[str, str] = { + "CreateDatasetRunItemRequest": ".create_dataset_run_item_request", + "PaginatedDatasetRunItems": ".paginated_dataset_run_items", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from 
{module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["CreateDatasetRunItemRequest", "PaginatedDatasetRunItems"] diff --git a/langfuse/api/resources/dataset_run_items/types/create_dataset_run_item_request.py b/langfuse/api/resources/dataset_run_items/types/create_dataset_run_item_request.py index 0a643b835..84060080f 100644 --- a/langfuse/api/resources/dataset_run_items/types/create_dataset_run_item_request.py +++ b/langfuse/api/resources/dataset_run_items/types/create_dataset_run_item_request.py @@ -1,64 +1,47 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class CreateDatasetRunItemRequest(pydantic_v1.BaseModel): - run_name: str = pydantic_v1.Field(alias="runName") - run_description: typing.Optional[str] = pydantic_v1.Field( - alias="runDescription", default=None - ) +class CreateDatasetRunItemRequest(UniversalBaseModel): + run_name: typing_extensions.Annotated[str, FieldMetadata(alias="runName")] + run_description: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="runDescription") + ] = pydantic.Field(default=None) """ Description of the run. If run exists, description will be updated. 
""" - metadata: typing.Optional[typing.Any] = pydantic_v1.Field(default=None) + metadata: typing.Optional[typing.Any] = pydantic.Field(default=None) """ Metadata of the dataset run, updates run if run already exists """ - dataset_item_id: str = pydantic_v1.Field(alias="datasetItemId") - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) + dataset_item_id: typing_extensions.Annotated[ + str, FieldMetadata(alias="datasetItemId") + ] + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = pydantic.Field(default=None) """ traceId should always be provided. For compatibility with older SDK versions it can also be inferred from the provided observationId. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen 
= True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/dataset_run_items/types/paginated_dataset_run_items.py b/langfuse/api/resources/dataset_run_items/types/paginated_dataset_run_items.py index c1611bae0..23e6f3291 100644 --- a/langfuse/api/resources/dataset_run_items/types/paginated_dataset_run_items.py +++ b/langfuse/api/resources/dataset_run_items/types/paginated_dataset_run_items.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.dataset_run_item import DatasetRunItem from ...utils.resources.pagination.types.meta_response import MetaResponse -class PaginatedDatasetRunItems(pydantic_v1.BaseModel): +class PaginatedDatasetRunItems(UniversalBaseModel): data: typing.List[DatasetRunItem] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = 
{dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/datasets/__init__.py b/langfuse/api/resources/datasets/__init__.py index dd30a359d..e9005285e 100644 --- a/langfuse/api/resources/datasets/__init__.py +++ b/langfuse/api/resources/datasets/__init__.py @@ -1,11 +1,51 @@ # This file was auto-generated by Fern from our API Definition. -from .types import ( - CreateDatasetRequest, - DeleteDatasetRunResponse, - PaginatedDatasetRuns, - PaginatedDatasets, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + CreateDatasetRequest, + DeleteDatasetRunResponse, + PaginatedDatasetRuns, + PaginatedDatasets, + ) +_dynamic_imports: typing.Dict[str, str] = { + "CreateDatasetRequest": ".types", + "DeleteDatasetRunResponse": ".types", + "PaginatedDatasetRuns": ".types", + "PaginatedDatasets": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "CreateDatasetRequest", diff --git a/langfuse/api/resources/datasets/client.py b/langfuse/api/resources/datasets/client.py index aff7293a0..169b1a4c7 100644 --- a/langfuse/api/resources/datasets/client.py +++ b/langfuse/api/resources/datasets/client.py @@ -1,20 +1,12 @@ # This file was 
auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..commons.types.dataset import Dataset from ..commons.types.dataset_run_with_items import DatasetRunWithItems +from .raw_client import AsyncRawDatasetsClient, RawDatasetsClient from .types.create_dataset_request import CreateDatasetRequest from .types.delete_dataset_run_response import DeleteDatasetRunResponse from .types.paginated_dataset_runs import PaginatedDatasetRuns @@ -26,7 +18,18 @@ class DatasetsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawDatasetsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawDatasetsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawDatasetsClient + """ + return self._raw_client def list( self, @@ -55,7 +58,7 @@ def list( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -67,37 +70,10 @@ def list( ) client.datasets.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/v2/datasets", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = self._raw_client.list( + page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDatasets, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get( self, @@ -121,7 +97,7 @@ def get( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -135,36 +111,8 @@ def get( dataset_name="datasetName", ) """ - _response = self._client_wrapper.httpx_client.request( - 
f"api/public/v2/datasets/{jsonable_encoder(dataset_name)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Dataset, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(dataset_name, request_options=request_options) + return _response.data def create( self, @@ -188,8 +136,8 @@ def create( Examples -------- - from langfuse import CreateDatasetRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.datasets import CreateDatasetRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -205,38 +153,10 @@ def create( ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/v2/datasets", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Dataset, _response.json()) # type: ignore - if 
_response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_run( self, @@ -263,7 +183,7 @@ def get_run( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -278,36 +198,10 @@ def get_run( run_name="runName", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs/{jsonable_encoder(run_name)}", - method="GET", - request_options=request_options, + _response = self._raw_client.get_run( + dataset_name, run_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetRunWithItems, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # 
type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete_run( self, @@ -334,7 +228,7 @@ def delete_run( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -349,38 +243,10 @@ def delete_run( run_name="runName", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs/{jsonable_encoder(run_name)}", - method="DELETE", - request_options=request_options, + _response = self._raw_client.delete_run( + dataset_name, run_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - DeleteDatasetRunResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: 
- raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_runs( self, @@ -412,7 +278,7 @@ def get_runs( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -426,42 +292,26 @@ def get_runs( dataset_name="datasetName", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = self._raw_client.get_runs( + dataset_name, page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDatasetRuns, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncDatasetsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = 
AsyncRawDatasetsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawDatasetsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawDatasetsClient + """ + return self._raw_client async def list( self, @@ -492,7 +342,7 @@ async def list( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -510,37 +360,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/v2/datasets", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = await self._raw_client.list( + page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDatasets, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get( self, @@ -566,7 +389,7 @@ async def get( -------- 
import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -586,36 +409,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/v2/datasets/{jsonable_encoder(dataset_name)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get( + dataset_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Dataset, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create( self, @@ -641,8 +438,8 @@ async def create( -------- import asyncio - from langfuse import CreateDatasetRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.datasets import CreateDatasetRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -664,38 +461,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await 
self._client_wrapper.httpx_client.request( - "api/public/v2/datasets", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Dataset, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_run( self, @@ -724,7 +493,7 @@ async def get_run( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -745,36 +514,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs/{jsonable_encoder(run_name)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_run( + dataset_name, run_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(DatasetRunWithItems, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete_run( self, @@ -803,7 +546,7 @@ async def delete_run( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -824,38 +567,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs/{jsonable_encoder(run_name)}", - method="DELETE", - request_options=request_options, + _response = await self._raw_client.delete_run( + dataset_name, run_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - DeleteDatasetRunResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_runs( self, @@ -889,7 +604,7 @@ async def get_runs( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -909,34 +624,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = await self._raw_client.get_runs( + dataset_name, page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDatasetRuns, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/datasets/raw_client.py b/langfuse/api/resources/datasets/raw_client.py new file mode 100644 index 000000000..6d87055fc --- /dev/null +++ b/langfuse/api/resources/datasets/raw_client.py @@ -0,0 +1,1334 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..commons.types.dataset import Dataset +from ..commons.types.dataset_run_with_items import DatasetRunWithItems +from .types.create_dataset_request import CreateDatasetRequest +from .types.delete_dataset_run_response import DeleteDatasetRunResponse +from .types.paginated_dataset_runs import PaginatedDatasetRuns +from .types.paginated_datasets import PaginatedDatasets + +# this is used as the default value for optional parameters +OMIT = 
typing.cast(typing.Any, ...) + + +class RawDatasetsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PaginatedDatasets]: + """ + Get all datasets + + Parameters + ---------- + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PaginatedDatasets] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/v2/datasets", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedDatasets, + parse_obj_as( + type_=PaginatedDatasets, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + 
if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get( + self, + dataset_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Dataset]: + """ + Get a dataset + + Parameters + ---------- + dataset_name : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Dataset] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/v2/datasets/{jsonable_encoder(dataset_name)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Dataset, + parse_obj_as( + type_=Dataset, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + 
headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def create( + self, + *, + request: CreateDatasetRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Dataset]: + """ + Create a dataset + + Parameters + ---------- + request : CreateDatasetRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Dataset] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/v2/datasets", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateDatasetRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Dataset, + parse_obj_as( + type_=Dataset, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if 
_response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_run( + self, + dataset_name: str, + run_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DatasetRunWithItems]: + """ + Get a dataset run and its items + + Parameters + ---------- + dataset_name : str + + run_name : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[DatasetRunWithItems] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs/{jsonable_encoder(run_name)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DatasetRunWithItems, + parse_obj_as( + type_=DatasetRunWithItems, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete_run( + self, + dataset_name: str, + run_name: str, + *, + 
request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeleteDatasetRunResponse]: + """ + Delete a dataset run and all its run items. This action is irreversible. + + Parameters + ---------- + dataset_name : str + + run_name : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeleteDatasetRunResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs/{jsonable_encoder(run_name)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteDatasetRunResponse, + parse_obj_as( + type_=DeleteDatasetRunResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = 
_response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_runs( + self, + dataset_name: str, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PaginatedDatasetRuns]: + """ + Get dataset runs + + Parameters + ---------- + dataset_name : str + + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PaginatedDatasetRuns] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedDatasetRuns, + parse_obj_as( + type_=PaginatedDatasetRuns, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if 
_response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawDatasetsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PaginatedDatasets]: + """ + Get all datasets + + Parameters + ---------- + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[PaginatedDatasets] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/v2/datasets", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedDatasets, + parse_obj_as( + type_=PaginatedDatasets, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get( + self, + dataset_name: str, + *, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Dataset]: + """ + Get a dataset + + Parameters + ---------- + dataset_name : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Dataset] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/v2/datasets/{jsonable_encoder(dataset_name)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Dataset, + parse_obj_as( + type_=Dataset, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise 
ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def create( + self, + *, + request: CreateDatasetRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Dataset]: + """ + Create a dataset + + Parameters + ---------- + request : CreateDatasetRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Dataset] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/v2/datasets", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateDatasetRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Dataset, + parse_obj_as( + type_=Dataset, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + 
body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_run( + self, + dataset_name: str, + run_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DatasetRunWithItems]: + """ + Get a dataset run and its items + + Parameters + ---------- + dataset_name : str + + run_name : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DatasetRunWithItems] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs/{jsonable_encoder(run_name)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DatasetRunWithItems, + parse_obj_as( + type_=DatasetRunWithItems, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if 
_response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def delete_run( + self, + dataset_name: str, + run_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeleteDatasetRunResponse]: + """ + Delete a dataset run and all its run items. This action is irreversible. + + Parameters + ---------- + dataset_name : str + + run_name : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[DeleteDatasetRunResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs/{jsonable_encoder(run_name)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteDatasetRunResponse, + parse_obj_as( + type_=DeleteDatasetRunResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_runs( + self, + dataset_name: 
str, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PaginatedDatasetRuns]: + """ + Get dataset runs + + Parameters + ---------- + dataset_name : str + + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PaginatedDatasetRuns] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/datasets/{jsonable_encoder(dataset_name)}/runs", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedDatasetRuns, + parse_obj_as( + type_=PaginatedDatasetRuns, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + 
headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/datasets/types/__init__.py b/langfuse/api/resources/datasets/types/__init__.py index f3304a59f..60c58934a 100644 --- a/langfuse/api/resources/datasets/types/__init__.py +++ b/langfuse/api/resources/datasets/types/__init__.py @@ -1,9 +1,49 @@ # This file was auto-generated by Fern from our API Definition. -from .create_dataset_request import CreateDatasetRequest -from .delete_dataset_run_response import DeleteDatasetRunResponse -from .paginated_dataset_runs import PaginatedDatasetRuns -from .paginated_datasets import PaginatedDatasets +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .create_dataset_request import CreateDatasetRequest + from .delete_dataset_run_response import DeleteDatasetRunResponse + from .paginated_dataset_runs import PaginatedDatasetRuns + from .paginated_datasets import PaginatedDatasets +_dynamic_imports: typing.Dict[str, str] = { + "CreateDatasetRequest": ".create_dataset_request", + "DeleteDatasetRunResponse": ".delete_dataset_run_response", + "PaginatedDatasetRuns": ".paginated_dataset_runs", + "PaginatedDatasets": ".paginated_datasets", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return 
getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "CreateDatasetRequest", diff --git a/langfuse/api/resources/datasets/types/create_dataset_request.py b/langfuse/api/resources/datasets/types/create_dataset_request.py index 228527909..81e5de83a 100644 --- a/langfuse/api/resources/datasets/types/create_dataset_request.py +++ b/langfuse/api/resources/datasets/types/create_dataset_request.py @@ -1,59 +1,38 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class CreateDatasetRequest(pydantic_v1.BaseModel): +class CreateDatasetRequest(UniversalBaseModel): name: str description: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - input_schema: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="inputSchema", default=None - ) + input_schema: typing_extensions.Annotated[ + typing.Optional[typing.Any], FieldMetadata(alias="inputSchema") + ] = pydantic.Field(default=None) """ JSON Schema for validating dataset item inputs. When set, all new and existing dataset items will be validated against this schema. 
""" - expected_output_schema: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="expectedOutputSchema", default=None - ) + expected_output_schema: typing_extensions.Annotated[ + typing.Optional[typing.Any], FieldMetadata(alias="expectedOutputSchema") + ] = pydantic.Field(default=None) """ JSON Schema for validating dataset item expected outputs. When set, all new and existing dataset items will be validated against this schema. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/datasets/types/delete_dataset_run_response.py b/langfuse/api/resources/datasets/types/delete_dataset_run_response.py index cf52eca14..7898674c7 100644 --- a/langfuse/api/resources/datasets/types/delete_dataset_run_response.py +++ b/langfuse/api/resources/datasets/types/delete_dataset_run_response.py @@ -1,42 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DeleteDatasetRunResponse(pydantic_v1.BaseModel): +class DeleteDatasetRunResponse(UniversalBaseModel): message: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/datasets/types/paginated_dataset_runs.py b/langfuse/api/resources/datasets/types/paginated_dataset_runs.py index 86f2f0a73..518037f48 100644 --- a/langfuse/api/resources/datasets/types/paginated_dataset_runs.py +++ b/langfuse/api/resources/datasets/types/paginated_dataset_runs.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.dataset_run import DatasetRun from ...utils.resources.pagination.types.meta_response import MetaResponse -class PaginatedDatasetRuns(pydantic_v1.BaseModel): +class PaginatedDatasetRuns(UniversalBaseModel): data: typing.List[DatasetRun] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/datasets/types/paginated_datasets.py b/langfuse/api/resources/datasets/types/paginated_datasets.py index c2d436bf4..72fb054ca 100644 --- a/langfuse/api/resources/datasets/types/paginated_datasets.py +++ b/langfuse/api/resources/datasets/types/paginated_datasets.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.dataset import Dataset from ...utils.resources.pagination.types.meta_response import MetaResponse -class PaginatedDatasets(pydantic_v1.BaseModel): +class PaginatedDatasets(UniversalBaseModel): data: typing.List[Dataset] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/health/__init__.py b/langfuse/api/resources/health/__init__.py index f468cdffb..2a101c5f7 100644 --- a/langfuse/api/resources/health/__init__.py +++ b/langfuse/api/resources/health/__init__.py @@ -1,6 +1,44 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import HealthResponse -from .errors import ServiceUnavailableError +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import HealthResponse + from .errors import ServiceUnavailableError +_dynamic_imports: typing.Dict[str, str] = { + "HealthResponse": ".types", + "ServiceUnavailableError": ".errors", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["HealthResponse", "ServiceUnavailableError"] diff --git a/langfuse/api/resources/health/client.py b/langfuse/api/resources/health/client.py index 029be7a0c..f7c303365 100644 --- a/langfuse/api/resources/health/client.py +++ b/langfuse/api/resources/health/client.py @@ -1,24 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError -from .errors.service_unavailable_error import ServiceUnavailableError +from .raw_client import AsyncRawHealthClient, RawHealthClient from .types.health_response import HealthResponse class HealthClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawHealthClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawHealthClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawHealthClient + """ + return self._raw_client def health( self, *, request_options: typing.Optional[RequestOptions] = None @@ -37,7 +40,7 @@ def health( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -49,41 +52,24 @@ def health( ) client.health.health() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/health", method="GET", request_options=request_options - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(HealthResponse, _response.json()) # type: ignore - if _response.status_code == 503: - raise ServiceUnavailableError() - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.health(request_options=request_options) + return _response.data class AsyncHealthClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawHealthClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawHealthClient: + """ + Retrieves a 
raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawHealthClient + """ + return self._raw_client async def health( self, *, request_options: typing.Optional[RequestOptions] = None @@ -104,7 +90,7 @@ async def health( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -122,33 +108,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/health", method="GET", request_options=request_options - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(HealthResponse, _response.json()) # type: ignore - if _response.status_code == 503: - raise ServiceUnavailableError() - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.health(request_options=request_options) + return _response.data diff --git a/langfuse/api/resources/health/errors/__init__.py b/langfuse/api/resources/health/errors/__init__.py index 46bb3fedd..3567f86b3 100644 --- 
a/langfuse/api/resources/health/errors/__init__.py +++ b/langfuse/api/resources/health/errors/__init__.py @@ -1,5 +1,42 @@ # This file was auto-generated by Fern from our API Definition. -from .service_unavailable_error import ServiceUnavailableError +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .service_unavailable_error import ServiceUnavailableError +_dynamic_imports: typing.Dict[str, str] = { + "ServiceUnavailableError": ".service_unavailable_error" +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["ServiceUnavailableError"] diff --git a/langfuse/api/resources/health/errors/service_unavailable_error.py b/langfuse/api/resources/health/errors/service_unavailable_error.py index acfd8fbf3..771b2c5ee 100644 --- a/langfuse/api/resources/health/errors/service_unavailable_error.py +++ b/langfuse/api/resources/health/errors/service_unavailable_error.py @@ -1,8 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
+import typing + from ....core.api_error import ApiError class ServiceUnavailableError(ApiError): - def __init__(self) -> None: - super().__init__(status_code=503) + def __init__(self, headers: typing.Optional[typing.Dict[str, str]] = None): + super().__init__( + status_code=503, + headers=headers, + ) diff --git a/langfuse/api/resources/health/raw_client.py b/langfuse/api/resources/health/raw_client.py new file mode 100644 index 000000000..2bb3c0ca3 --- /dev/null +++ b/langfuse/api/resources/health/raw_client.py @@ -0,0 +1,227 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .errors.service_unavailable_error import ServiceUnavailableError +from .types.health_response import HealthResponse + + +class RawHealthClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def health( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[HealthResponse]: + """ + Check health of API and database + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[HealthResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/health", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + HealthResponse, + parse_obj_as( + type_=HealthResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 503: + raise ServiceUnavailableError(headers=dict(_response.headers)) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawHealthClient: + def __init__(self, *, 
client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def health( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[HealthResponse]: + """ + Check health of API and database + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[HealthResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/health", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + HealthResponse, + parse_obj_as( + type_=HealthResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 503: + raise ServiceUnavailableError(headers=dict(_response.headers)) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/health/types/__init__.py b/langfuse/api/resources/health/types/__init__.py index 5fb7ec574..d4bec6804 100644 --- a/langfuse/api/resources/health/types/__init__.py +++ b/langfuse/api/resources/health/types/__init__.py @@ -1,5 +1,40 @@ # This file was auto-generated by Fern from our API Definition. -from .health_response import HealthResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .health_response import HealthResponse +_dynamic_imports: typing.Dict[str, str] = {"HealthResponse": ".health_response"} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["HealthResponse"] diff --git a/langfuse/api/resources/health/types/health_response.py b/langfuse/api/resources/health/types/health_response.py index 633da67a8..0e38d96cf 100644 --- a/langfuse/api/resources/health/types/health_response.py +++ b/langfuse/api/resources/health/types/health_response.py @@ -1,17 +1,16 @@ # This 
file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class HealthResponse(pydantic_v1.BaseModel): +class HealthResponse(UniversalBaseModel): """ Examples -------- - from langfuse import HealthResponse + from langfuse.resources.health import HealthResponse HealthResponse( version="1.25.0", @@ -19,40 +18,20 @@ class HealthResponse(pydantic_v1.BaseModel): ) """ - version: str = pydantic_v1.Field() + version: str = pydantic.Field() """ Langfuse server version """ status: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/__init__.py b/langfuse/api/resources/ingestion/__init__.py index 9e072dc17..5cd4ba3bd 100644 --- a/langfuse/api/resources/ingestion/__init__.py +++ 
b/langfuse/api/resources/ingestion/__init__.py @@ -1,49 +1,127 @@ # This file was auto-generated by Fern from our API Definition. -from .types import ( - BaseEvent, - CreateEventBody, - CreateEventEvent, - CreateGenerationBody, - CreateGenerationEvent, - CreateObservationEvent, - CreateSpanBody, - CreateSpanEvent, - IngestionError, - IngestionEvent, - IngestionEvent_EventCreate, - IngestionEvent_GenerationCreate, - IngestionEvent_GenerationUpdate, - IngestionEvent_ObservationCreate, - IngestionEvent_ObservationUpdate, - IngestionEvent_ScoreCreate, - IngestionEvent_SdkLog, - IngestionEvent_SpanCreate, - IngestionEvent_SpanUpdate, - IngestionEvent_TraceCreate, - IngestionResponse, - IngestionSuccess, - IngestionUsage, - ObservationBody, - ObservationType, - OpenAiCompletionUsageSchema, - OpenAiResponseUsageSchema, - OpenAiUsage, - OptionalObservationBody, - ScoreBody, - ScoreEvent, - SdkLogBody, - SdkLogEvent, - TraceBody, - TraceEvent, - UpdateEventBody, - UpdateGenerationBody, - UpdateGenerationEvent, - UpdateObservationEvent, - UpdateSpanBody, - UpdateSpanEvent, - UsageDetails, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + BaseEvent, + CreateEventBody, + CreateEventEvent, + CreateGenerationBody, + CreateGenerationEvent, + CreateObservationEvent, + CreateSpanBody, + CreateSpanEvent, + IngestionError, + IngestionEvent, + IngestionEvent_EventCreate, + IngestionEvent_GenerationCreate, + IngestionEvent_GenerationUpdate, + IngestionEvent_ObservationCreate, + IngestionEvent_ObservationUpdate, + IngestionEvent_ScoreCreate, + IngestionEvent_SdkLog, + IngestionEvent_SpanCreate, + IngestionEvent_SpanUpdate, + IngestionEvent_TraceCreate, + IngestionResponse, + IngestionSuccess, + IngestionUsage, + ObservationBody, + ObservationType, + OpenAiCompletionUsageSchema, + OpenAiResponseUsageSchema, + OpenAiUsage, + OptionalObservationBody, + ScoreBody, + ScoreEvent, + SdkLogBody, + SdkLogEvent, + 
TraceBody, + TraceEvent, + UpdateEventBody, + UpdateGenerationBody, + UpdateGenerationEvent, + UpdateObservationEvent, + UpdateSpanBody, + UpdateSpanEvent, + UsageDetails, + ) +_dynamic_imports: typing.Dict[str, str] = { + "BaseEvent": ".types", + "CreateEventBody": ".types", + "CreateEventEvent": ".types", + "CreateGenerationBody": ".types", + "CreateGenerationEvent": ".types", + "CreateObservationEvent": ".types", + "CreateSpanBody": ".types", + "CreateSpanEvent": ".types", + "IngestionError": ".types", + "IngestionEvent": ".types", + "IngestionEvent_EventCreate": ".types", + "IngestionEvent_GenerationCreate": ".types", + "IngestionEvent_GenerationUpdate": ".types", + "IngestionEvent_ObservationCreate": ".types", + "IngestionEvent_ObservationUpdate": ".types", + "IngestionEvent_ScoreCreate": ".types", + "IngestionEvent_SdkLog": ".types", + "IngestionEvent_SpanCreate": ".types", + "IngestionEvent_SpanUpdate": ".types", + "IngestionEvent_TraceCreate": ".types", + "IngestionResponse": ".types", + "IngestionSuccess": ".types", + "IngestionUsage": ".types", + "ObservationBody": ".types", + "ObservationType": ".types", + "OpenAiCompletionUsageSchema": ".types", + "OpenAiResponseUsageSchema": ".types", + "OpenAiUsage": ".types", + "OptionalObservationBody": ".types", + "ScoreBody": ".types", + "ScoreEvent": ".types", + "SdkLogBody": ".types", + "SdkLogEvent": ".types", + "TraceBody": ".types", + "TraceEvent": ".types", + "UpdateEventBody": ".types", + "UpdateGenerationBody": ".types", + "UpdateGenerationEvent": ".types", + "UpdateObservationEvent": ".types", + "UpdateSpanBody": ".types", + "UpdateSpanEvent": ".types", + "UsageDetails": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + 
return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "BaseEvent", diff --git a/langfuse/api/resources/ingestion/client.py b/langfuse/api/resources/ingestion/client.py index c009c507b..780de986c 100644 --- a/langfuse/api/resources/ingestion/client.py +++ b/langfuse/api/resources/ingestion/client.py @@ -1,17 +1,10 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawIngestionClient, RawIngestionClient from .types.ingestion_event import IngestionEvent from .types.ingestion_response import IngestionResponse @@ -21,7 +14,18 @@ class IngestionClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawIngestionClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawIngestionClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawIngestionClient + """ + return self._raw_client def batch( self, @@ -66,8 +70,8 @@ def batch( -------- import datetime - from langfuse import IngestionEvent_TraceCreate, TraceBody - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.ingestion import IngestionEvent_TraceCreate, TraceBody client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -103,43 +107,26 @@ def batch( ], ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/ingestion", - method="POST", - json={"batch": batch, "metadata": metadata}, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.batch( + batch=batch, metadata=metadata, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(IngestionResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncIngestionClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = 
AsyncRawIngestionClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawIngestionClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawIngestionClient + """ + return self._raw_client async def batch( self, @@ -185,8 +172,8 @@ async def batch( import asyncio import datetime - from langfuse import IngestionEvent_TraceCreate, TraceBody - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.ingestion import IngestionEvent_TraceCreate, TraceBody client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -228,35 +215,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/ingestion", - method="POST", - json={"batch": batch, "metadata": metadata}, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.batch( + batch=batch, metadata=metadata, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(IngestionResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, 
body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/ingestion/raw_client.py b/langfuse/api/resources/ingestion/raw_client.py new file mode 100644 index 000000000..9939f292d --- /dev/null +++ b/langfuse/api/resources/ingestion/raw_client.py @@ -0,0 +1,293 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.ingestion_event import IngestionEvent +from .types.ingestion_response import IngestionResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawIngestionClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def batch( + self, + *, + batch: typing.Sequence[IngestionEvent], + metadata: typing.Optional[typing.Any] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[IngestionResponse]: + """ + **Legacy endpoint for batch ingestion for Langfuse Observability.** + + -> Please use the OpenTelemetry endpoint (`/api/public/otel/v1/traces`). Learn more: https://langfuse.com/integrations/native/opentelemetry + + Within each batch, there can be multiple events. 
+ Each event has a type, an id, a timestamp, metadata and a body. + Internally, we refer to this as the "event envelope" as it tells us something about the event but not the trace. + We use the event id within this envelope to deduplicate messages to avoid processing the same event twice, i.e. the event id should be unique per request. + The event.body.id is the ID of the actual trace and will be used for updates and will be visible within the Langfuse App. + I.e. if you want to update a trace, you'd use the same body id, but separate event IDs. + + Notes: + - Introduction to data model: https://langfuse.com/docs/observability/data-model + - Batch sizes are limited to 3.5 MB in total. You need to adjust the number of events per batch accordingly. + - The API does not return a 4xx status code for input errors. Instead, it responds with a 207 status code, which includes a list of the encountered errors. + + Parameters + ---------- + batch : typing.Sequence[IngestionEvent] + Batch of tracing events to be ingested. Discriminated by attribute `type`. + + metadata : typing.Optional[typing.Any] + Optional. Metadata field used by the Langfuse SDKs for debugging. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[IngestionResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/ingestion", + method="POST", + json={ + "batch": convert_and_respect_annotation_metadata( + object_=batch, + annotation=typing.Sequence[IngestionEvent], + direction="write", + ), + "metadata": metadata, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + IngestionResponse, + parse_obj_as( + type_=IngestionResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawIngestionClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def batch( + self, + *, + batch: typing.Sequence[IngestionEvent], + metadata: typing.Optional[typing.Any] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[IngestionResponse]: + """ + **Legacy endpoint for batch ingestion for Langfuse Observability.** + + -> Please use the OpenTelemetry endpoint (`/api/public/otel/v1/traces`). Learn more: https://langfuse.com/integrations/native/opentelemetry + + Within each batch, there can be multiple events. + Each event has a type, an id, a timestamp, metadata and a body. + Internally, we refer to this as the "event envelope" as it tells us something about the event but not the trace. + We use the event id within this envelope to deduplicate messages to avoid processing the same event twice, i.e. the event id should be unique per request. + The event.body.id is the ID of the actual trace and will be used for updates and will be visible within the Langfuse App. + I.e. if you want to update a trace, you'd use the same body id, but separate event IDs. + + Notes: + - Introduction to data model: https://langfuse.com/docs/observability/data-model + - Batch sizes are limited to 3.5 MB in total. You need to adjust the number of events per batch accordingly. + - The API does not return a 4xx status code for input errors. Instead, it responds with a 207 status code, which includes a list of the encountered errors. + + Parameters + ---------- + batch : typing.Sequence[IngestionEvent] + Batch of tracing events to be ingested. Discriminated by attribute `type`. + + metadata : typing.Optional[typing.Any] + Optional. Metadata field used by the Langfuse SDKs for debugging. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[IngestionResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/ingestion", + method="POST", + json={ + "batch": convert_and_respect_annotation_metadata( + object_=batch, + annotation=typing.Sequence[IngestionEvent], + direction="write", + ), + "metadata": metadata, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + IngestionResponse, + parse_obj_as( + type_=IngestionResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/ingestion/types/__init__.py b/langfuse/api/resources/ingestion/types/__init__.py index a3490e4dc..4addfd9c7 100644 --- a/langfuse/api/resources/ingestion/types/__init__.py +++ b/langfuse/api/resources/ingestion/types/__init__.py @@ -1,49 +1,127 @@ # This file was auto-generated by Fern from our API Definition. -from .base_event import BaseEvent -from .create_event_body import CreateEventBody -from .create_event_event import CreateEventEvent -from .create_generation_body import CreateGenerationBody -from .create_generation_event import CreateGenerationEvent -from .create_observation_event import CreateObservationEvent -from .create_span_body import CreateSpanBody -from .create_span_event import CreateSpanEvent -from .ingestion_error import IngestionError -from .ingestion_event import ( - IngestionEvent, - IngestionEvent_EventCreate, - IngestionEvent_GenerationCreate, - IngestionEvent_GenerationUpdate, - IngestionEvent_ObservationCreate, - IngestionEvent_ObservationUpdate, - IngestionEvent_ScoreCreate, - IngestionEvent_SdkLog, - IngestionEvent_SpanCreate, - IngestionEvent_SpanUpdate, - IngestionEvent_TraceCreate, -) -from .ingestion_response import IngestionResponse -from .ingestion_success import IngestionSuccess -from .ingestion_usage import IngestionUsage -from .observation_body import ObservationBody -from .observation_type import ObservationType -from .open_ai_completion_usage_schema import OpenAiCompletionUsageSchema -from .open_ai_response_usage_schema import OpenAiResponseUsageSchema -from .open_ai_usage import OpenAiUsage -from .optional_observation_body import OptionalObservationBody -from .score_body import ScoreBody -from .score_event import ScoreEvent -from .sdk_log_body import SdkLogBody -from .sdk_log_event import SdkLogEvent -from .trace_body import TraceBody -from .trace_event import TraceEvent -from .update_event_body import UpdateEventBody -from 
.update_generation_body import UpdateGenerationBody -from .update_generation_event import UpdateGenerationEvent -from .update_observation_event import UpdateObservationEvent -from .update_span_body import UpdateSpanBody -from .update_span_event import UpdateSpanEvent -from .usage_details import UsageDetails +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .base_event import BaseEvent + from .create_event_body import CreateEventBody + from .create_event_event import CreateEventEvent + from .create_generation_body import CreateGenerationBody + from .create_generation_event import CreateGenerationEvent + from .create_observation_event import CreateObservationEvent + from .create_span_body import CreateSpanBody + from .create_span_event import CreateSpanEvent + from .ingestion_error import IngestionError + from .ingestion_event import ( + IngestionEvent, + IngestionEvent_EventCreate, + IngestionEvent_GenerationCreate, + IngestionEvent_GenerationUpdate, + IngestionEvent_ObservationCreate, + IngestionEvent_ObservationUpdate, + IngestionEvent_ScoreCreate, + IngestionEvent_SdkLog, + IngestionEvent_SpanCreate, + IngestionEvent_SpanUpdate, + IngestionEvent_TraceCreate, + ) + from .ingestion_response import IngestionResponse + from .ingestion_success import IngestionSuccess + from .ingestion_usage import IngestionUsage + from .observation_body import ObservationBody + from .observation_type import ObservationType + from .open_ai_completion_usage_schema import OpenAiCompletionUsageSchema + from .open_ai_response_usage_schema import OpenAiResponseUsageSchema + from .open_ai_usage import OpenAiUsage + from .optional_observation_body import OptionalObservationBody + from .score_body import ScoreBody + from .score_event import ScoreEvent + from .sdk_log_body import SdkLogBody + from .sdk_log_event import SdkLogEvent + from .trace_body import TraceBody + from .trace_event import TraceEvent + from .update_event_body import 
UpdateEventBody + from .update_generation_body import UpdateGenerationBody + from .update_generation_event import UpdateGenerationEvent + from .update_observation_event import UpdateObservationEvent + from .update_span_body import UpdateSpanBody + from .update_span_event import UpdateSpanEvent + from .usage_details import UsageDetails +_dynamic_imports: typing.Dict[str, str] = { + "BaseEvent": ".base_event", + "CreateEventBody": ".create_event_body", + "CreateEventEvent": ".create_event_event", + "CreateGenerationBody": ".create_generation_body", + "CreateGenerationEvent": ".create_generation_event", + "CreateObservationEvent": ".create_observation_event", + "CreateSpanBody": ".create_span_body", + "CreateSpanEvent": ".create_span_event", + "IngestionError": ".ingestion_error", + "IngestionEvent": ".ingestion_event", + "IngestionEvent_EventCreate": ".ingestion_event", + "IngestionEvent_GenerationCreate": ".ingestion_event", + "IngestionEvent_GenerationUpdate": ".ingestion_event", + "IngestionEvent_ObservationCreate": ".ingestion_event", + "IngestionEvent_ObservationUpdate": ".ingestion_event", + "IngestionEvent_ScoreCreate": ".ingestion_event", + "IngestionEvent_SdkLog": ".ingestion_event", + "IngestionEvent_SpanCreate": ".ingestion_event", + "IngestionEvent_SpanUpdate": ".ingestion_event", + "IngestionEvent_TraceCreate": ".ingestion_event", + "IngestionResponse": ".ingestion_response", + "IngestionSuccess": ".ingestion_success", + "IngestionUsage": ".ingestion_usage", + "ObservationBody": ".observation_body", + "ObservationType": ".observation_type", + "OpenAiCompletionUsageSchema": ".open_ai_completion_usage_schema", + "OpenAiResponseUsageSchema": ".open_ai_response_usage_schema", + "OpenAiUsage": ".open_ai_usage", + "OptionalObservationBody": ".optional_observation_body", + "ScoreBody": ".score_body", + "ScoreEvent": ".score_event", + "SdkLogBody": ".sdk_log_body", + "SdkLogEvent": ".sdk_log_event", + "TraceBody": ".trace_body", + "TraceEvent": ".trace_event", + 
"UpdateEventBody": ".update_event_body", + "UpdateGenerationBody": ".update_generation_body", + "UpdateGenerationEvent": ".update_generation_event", + "UpdateObservationEvent": ".update_observation_event", + "UpdateSpanBody": ".update_span_body", + "UpdateSpanEvent": ".update_span_event", + "UsageDetails": ".usage_details", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "BaseEvent", diff --git a/langfuse/api/resources/ingestion/types/base_event.py b/langfuse/api/resources/ingestion/types/base_event.py index dec8a52e7..2fc9bac0f 100644 --- a/langfuse/api/resources/ingestion/types/base_event.py +++ b/langfuse/api/resources/ingestion/types/base_event.py @@ -1,55 +1,34 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class BaseEvent(pydantic_v1.BaseModel): - id: str = pydantic_v1.Field() +class BaseEvent(UniversalBaseModel): + id: str = pydantic.Field() """ UUID v4 that identifies the event """ - timestamp: str = pydantic_v1.Field() + timestamp: str = pydantic.Field() """ Datetime (ISO 8601) of event creation in client. Should be as close to actual event creation in client as possible, this timestamp will be used for ordering of events in future release. Resolution: milliseconds (required), microseconds (optimal). """ - metadata: typing.Optional[typing.Any] = pydantic_v1.Field(default=None) + metadata: typing.Optional[typing.Any] = pydantic.Field(default=None) """ Optional. Metadata field used by the Langfuse SDKs for debugging. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = 
True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/create_event_body.py b/langfuse/api/resources/ingestion/types/create_event_body.py index afe8677f3..bf926edc1 100644 --- a/langfuse/api/resources/ingestion/types/create_event_body.py +++ b/langfuse/api/resources/ingestion/types/create_event_body.py @@ -1,45 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .optional_observation_body import OptionalObservationBody class CreateEventBody(OptionalObservationBody): id: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/create_event_event.py 
b/langfuse/api/resources/ingestion/types/create_event_event.py index 0c3cce040..2eb3ffaa5 100644 --- a/langfuse/api/resources/ingestion/types/create_event_event.py +++ b/langfuse/api/resources/ingestion/types/create_event_event.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_event import BaseEvent from .create_event_body import CreateEventBody @@ -12,35 +11,13 @@ class CreateEventEvent(BaseEvent): body: CreateEventBody - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/create_generation_body.py b/langfuse/api/resources/ingestion/types/create_generation_body.py index 428b58607..30dcdbf97 100644 --- 
a/langfuse/api/resources/ingestion/types/create_generation_body.py +++ b/langfuse/api/resources/ingestion/types/create_generation_body.py @@ -3,8 +3,10 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from ...commons.types.map_value import MapValue from .create_span_body import CreateSpanBody from .ingestion_usage import IngestionUsage @@ -12,56 +14,35 @@ class CreateGenerationBody(CreateSpanBody): - completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="completionStartTime", default=None - ) + completion_start_time: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="completionStartTime") + ] = None model: typing.Optional[str] = None - model_parameters: typing.Optional[typing.Dict[str, MapValue]] = pydantic_v1.Field( - alias="modelParameters", default=None - ) + model_parameters: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, MapValue]], + FieldMetadata(alias="modelParameters"), + ] = None usage: typing.Optional[IngestionUsage] = None - usage_details: typing.Optional[UsageDetails] = pydantic_v1.Field( - alias="usageDetails", default=None - ) - cost_details: typing.Optional[typing.Dict[str, float]] = pydantic_v1.Field( - alias="costDetails", default=None - ) - prompt_name: typing.Optional[str] = pydantic_v1.Field( - alias="promptName", default=None - ) - prompt_version: typing.Optional[int] = pydantic_v1.Field( - alias="promptVersion", default=None - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - 
kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + usage_details: typing_extensions.Annotated[ + typing.Optional[UsageDetails], FieldMetadata(alias="usageDetails") + ] = None + cost_details: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, float]], FieldMetadata(alias="costDetails") + ] = None + prompt_name: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="promptName") + ] = None + prompt_version: typing_extensions.Annotated[ + typing.Optional[int], FieldMetadata(alias="promptVersion") + ] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/create_generation_event.py b/langfuse/api/resources/ingestion/types/create_generation_event.py index cb7b484dd..8254faf96 100644 --- a/langfuse/api/resources/ingestion/types/create_generation_event.py +++ b/langfuse/api/resources/ingestion/types/create_generation_event.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_event import BaseEvent from .create_generation_body import CreateGenerationBody @@ -12,35 +11,13 @@ class CreateGenerationEvent(BaseEvent): body: CreateGenerationBody - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/create_observation_event.py b/langfuse/api/resources/ingestion/types/create_observation_event.py index adfefc793..c0ebf333c 100644 --- a/langfuse/api/resources/ingestion/types/create_observation_event.py +++ b/langfuse/api/resources/ingestion/types/create_observation_event.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_event import BaseEvent from .observation_body import ObservationBody @@ -12,35 +11,13 @@ class CreateObservationEvent(BaseEvent): body: ObservationBody - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/create_span_body.py b/langfuse/api/resources/ingestion/types/create_span_body.py index c31fde567..c6c7c3dfe 100644 --- a/langfuse/api/resources/ingestion/types/create_span_body.py +++ b/langfuse/api/resources/ingestion/types/create_span_body.py @@ -3,45 +3,25 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 
+import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from .create_event_body import CreateEventBody class CreateSpanBody(CreateEventBody): - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="endTime", default=None - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + end_time: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="endTime") + ] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/create_span_event.py b/langfuse/api/resources/ingestion/types/create_span_event.py index 7a8e8154c..79bca12c3 100644 --- a/langfuse/api/resources/ingestion/types/create_span_event.py +++ b/langfuse/api/resources/ingestion/types/create_span_event.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_event import BaseEvent from .create_span_body import CreateSpanBody @@ -12,35 +11,13 @@ class CreateSpanEvent(BaseEvent): body: CreateSpanBody - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/ingestion_error.py b/langfuse/api/resources/ingestion/types/ingestion_error.py index b9028ce1d..ecb0dfecf 100644 --- a/langfuse/api/resources/ingestion/types/ingestion_error.py +++ b/langfuse/api/resources/ingestion/types/ingestion_error.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class IngestionError(pydantic_v1.BaseModel): +class IngestionError(UniversalBaseModel): id: str status: int message: typing.Optional[str] = None error: typing.Optional[typing.Any] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/ingestion_event.py b/langfuse/api/resources/ingestion/types/ingestion_event.py index e083c9354..0802295cc 100644 --- a/langfuse/api/resources/ingestion/types/ingestion_event.py +++ b/langfuse/api/resources/ingestion/types/ingestion_event.py @@ -2,11 +2,11 @@ from __future__ import annotations -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, 
pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .create_event_body import CreateEventBody from .create_generation_body import CreateGenerationBody from .create_span_body import CreateSpanBody @@ -18,405 +18,208 @@ from .update_span_body import UpdateSpanBody -class IngestionEvent_TraceCreate(pydantic_v1.BaseModel): +class IngestionEvent_TraceCreate(UniversalBaseModel): + type: typing.Literal["trace-create"] = "trace-create" body: TraceBody id: str timestamp: str metadata: typing.Optional[typing.Any] = None - type: typing.Literal["trace-create"] = "trace-create" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class IngestionEvent_ScoreCreate(pydantic_v1.BaseModel): + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class IngestionEvent_ScoreCreate(UniversalBaseModel): + type: typing.Literal["score-create"] = "score-create" body: ScoreBody id: str timestamp: str metadata: typing.Optional[typing.Any] = None - type: typing.Literal["score-create"] = 
"score-create" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class IngestionEvent_SpanCreate(pydantic_v1.BaseModel): + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class IngestionEvent_SpanCreate(UniversalBaseModel): + type: typing.Literal["span-create"] = "span-create" body: CreateSpanBody id: str timestamp: str metadata: typing.Optional[typing.Any] = None - type: typing.Literal["span-create"] = "span-create" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - 
super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class IngestionEvent_SpanUpdate(pydantic_v1.BaseModel): + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class IngestionEvent_SpanUpdate(UniversalBaseModel): + type: typing.Literal["span-update"] = "span-update" body: UpdateSpanBody id: str timestamp: str metadata: typing.Optional[typing.Any] = None - type: typing.Literal["span-update"] = "span-update" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class IngestionEvent_GenerationCreate(pydantic_v1.BaseModel): + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class IngestionEvent_GenerationCreate(UniversalBaseModel): + type: typing.Literal["generation-create"] = "generation-create" 
body: CreateGenerationBody id: str timestamp: str metadata: typing.Optional[typing.Any] = None - type: typing.Literal["generation-create"] = "generation-create" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class IngestionEvent_GenerationUpdate(pydantic_v1.BaseModel): + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class IngestionEvent_GenerationUpdate(UniversalBaseModel): + type: typing.Literal["generation-update"] = "generation-update" body: UpdateGenerationBody id: str timestamp: str metadata: typing.Optional[typing.Any] = None - type: typing.Literal["generation-update"] = "generation-update" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - 
"by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class IngestionEvent_EventCreate(pydantic_v1.BaseModel): + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class IngestionEvent_EventCreate(UniversalBaseModel): + type: typing.Literal["event-create"] = "event-create" body: CreateEventBody id: str timestamp: str metadata: typing.Optional[typing.Any] = None - type: typing.Literal["event-create"] = "event-create" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class IngestionEvent_SdkLog(pydantic_v1.BaseModel): + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = 
pydantic.Extra.allow + + +class IngestionEvent_SdkLog(UniversalBaseModel): + type: typing.Literal["sdk-log"] = "sdk-log" body: SdkLogBody id: str timestamp: str metadata: typing.Optional[typing.Any] = None - type: typing.Literal["sdk-log"] = "sdk-log" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class IngestionEvent_ObservationCreate(pydantic_v1.BaseModel): + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class IngestionEvent_ObservationCreate(UniversalBaseModel): + type: typing.Literal["observation-create"] = "observation-create" body: ObservationBody id: str timestamp: str metadata: typing.Optional[typing.Any] = None - type: typing.Literal["observation-create"] = "observation-create" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - 
"exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class IngestionEvent_ObservationUpdate(pydantic_v1.BaseModel): + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class IngestionEvent_ObservationUpdate(UniversalBaseModel): + type: typing.Literal["observation-update"] = "observation-update" body: ObservationBody id: str timestamp: str metadata: typing.Optional[typing.Any] = None - type: typing.Literal["observation-update"] = "observation-update" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -IngestionEvent = typing.Union[ - IngestionEvent_TraceCreate, - IngestionEvent_ScoreCreate, - IngestionEvent_SpanCreate, - IngestionEvent_SpanUpdate, - 
IngestionEvent_GenerationCreate, - IngestionEvent_GenerationUpdate, - IngestionEvent_EventCreate, - IngestionEvent_SdkLog, - IngestionEvent_ObservationCreate, - IngestionEvent_ObservationUpdate, + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +IngestionEvent = typing_extensions.Annotated[ + typing.Union[ + IngestionEvent_TraceCreate, + IngestionEvent_ScoreCreate, + IngestionEvent_SpanCreate, + IngestionEvent_SpanUpdate, + IngestionEvent_GenerationCreate, + IngestionEvent_GenerationUpdate, + IngestionEvent_EventCreate, + IngestionEvent_SdkLog, + IngestionEvent_ObservationCreate, + IngestionEvent_ObservationUpdate, + ], + pydantic.Field(discriminator="type"), ] diff --git a/langfuse/api/resources/ingestion/types/ingestion_response.py b/langfuse/api/resources/ingestion/types/ingestion_response.py index b4e66349c..485ba04a0 100644 --- a/langfuse/api/resources/ingestion/types/ingestion_response.py +++ b/langfuse/api/resources/ingestion/types/ingestion_response.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ingestion_error import IngestionError from .ingestion_success import IngestionSuccess -class IngestionResponse(pydantic_v1.BaseModel): +class IngestionResponse(UniversalBaseModel): successes: typing.List[IngestionSuccess] errors: typing.List[IngestionError] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/ingestion_success.py b/langfuse/api/resources/ingestion/types/ingestion_success.py index 481e64752..34b149e04 100644 --- a/langfuse/api/resources/ingestion/types/ingestion_success.py +++ b/langfuse/api/resources/ingestion/types/ingestion_success.py @@ -1,43 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class IngestionSuccess(pydantic_v1.BaseModel): +class IngestionSuccess(UniversalBaseModel): id: str status: int - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/observation_body.py b/langfuse/api/resources/ingestion/types/observation_body.py index d191a1f12..bf4b9ee4d 100644 --- a/langfuse/api/resources/ingestion/types/observation_body.py +++ b/langfuse/api/resources/ingestion/types/observation_body.py @@ -3,75 +3,58 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, 
UniversalBaseModel +from ....core.serialization import FieldMetadata from ...commons.types.map_value import MapValue from ...commons.types.observation_level import ObservationLevel from ...commons.types.usage import Usage from .observation_type import ObservationType -class ObservationBody(pydantic_v1.BaseModel): +class ObservationBody(UniversalBaseModel): id: typing.Optional[str] = None - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None type: ObservationType name: typing.Optional[str] = None - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="startTime", default=None - ) - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="endTime", default=None - ) - completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="completionStartTime", default=None - ) + start_time: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="startTime") + ] = None + end_time: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="endTime") + ] = None + completion_start_time: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="completionStartTime") + ] = None model: typing.Optional[str] = None - model_parameters: typing.Optional[typing.Dict[str, MapValue]] = pydantic_v1.Field( - alias="modelParameters", default=None - ) + model_parameters: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, MapValue]], + FieldMetadata(alias="modelParameters"), + ] = None input: typing.Optional[typing.Any] = None version: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None output: typing.Optional[typing.Any] = None usage: typing.Optional[Usage] = None level: typing.Optional[ObservationLevel] = None - status_message: typing.Optional[str] = pydantic_v1.Field( - alias="statusMessage", default=None - 
) - parent_observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="parentObservationId", default=None - ) + status_message: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="statusMessage") + ] = None + parent_observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="parentObservationId") + ] = None environment: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/observation_type.py b/langfuse/api/resources/ingestion/types/observation_type.py index 2f11300ff..eb6079ba7 100644 --- a/langfuse/api/resources/ingestion/types/observation_type.py +++ b/langfuse/api/resources/ingestion/types/observation_type.py @@ -1,53 +1,19 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class ObservationType(str, enum.Enum): - SPAN = "SPAN" - GENERATION = "GENERATION" - EVENT = "EVENT" - AGENT = "AGENT" - TOOL = "TOOL" - CHAIN = "CHAIN" - RETRIEVER = "RETRIEVER" - EVALUATOR = "EVALUATOR" - EMBEDDING = "EMBEDDING" - GUARDRAIL = "GUARDRAIL" - - def visit( - self, - span: typing.Callable[[], T_Result], - generation: typing.Callable[[], T_Result], - event: typing.Callable[[], T_Result], - agent: typing.Callable[[], T_Result], - tool: typing.Callable[[], T_Result], - chain: typing.Callable[[], T_Result], - retriever: typing.Callable[[], T_Result], - evaluator: typing.Callable[[], T_Result], - embedding: typing.Callable[[], T_Result], - guardrail: typing.Callable[[], T_Result], - ) -> T_Result: - if self is ObservationType.SPAN: - return span() - if self is ObservationType.GENERATION: - return generation() - if self is ObservationType.EVENT: - return event() - if self is ObservationType.AGENT: - return agent() - if self is ObservationType.TOOL: - return tool() - if self is ObservationType.CHAIN: - return chain() - if self is ObservationType.RETRIEVER: - return retriever() - if self is ObservationType.EVALUATOR: - return evaluator() - if self is ObservationType.EMBEDDING: - return embedding() - if self is ObservationType.GUARDRAIL: - return guardrail() +ObservationType = typing.Union[ + typing.Literal[ + "SPAN", + "GENERATION", + "EVENT", + "AGENT", + "TOOL", + "CHAIN", + "RETRIEVER", + "EVALUATOR", + "EMBEDDING", + "GUARDRAIL", + ], + typing.Any, +] diff --git a/langfuse/api/resources/ingestion/types/open_ai_completion_usage_schema.py b/langfuse/api/resources/ingestion/types/open_ai_completion_usage_schema.py index 368a7da03..2cc8cedf1 100644 --- a/langfuse/api/resources/ingestion/types/open_ai_completion_usage_schema.py +++ b/langfuse/api/resources/ingestion/types/open_ai_completion_usage_schema.py @@ -1,13 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class OpenAiCompletionUsageSchema(pydantic_v1.BaseModel): +class OpenAiCompletionUsageSchema(UniversalBaseModel): """ OpenAI Usage schema from (Chat-)Completion APIs """ @@ -22,33 +21,13 @@ class OpenAiCompletionUsageSchema(pydantic_v1.BaseModel): typing.Dict[str, typing.Optional[int]] ] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/open_ai_response_usage_schema.py b/langfuse/api/resources/ingestion/types/open_ai_response_usage_schema.py index 0c68e6a7d..7f5cd4ace 100644 --- a/langfuse/api/resources/ingestion/types/open_ai_response_usage_schema.py +++ b/langfuse/api/resources/ingestion/types/open_ai_response_usage_schema.py @@ -1,13 +1,12 @@ # This file was auto-generated by 
Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class OpenAiResponseUsageSchema(pydantic_v1.BaseModel): +class OpenAiResponseUsageSchema(UniversalBaseModel): """ OpenAI Usage schema from Response API """ @@ -20,33 +19,13 @@ class OpenAiResponseUsageSchema(pydantic_v1.BaseModel): None ) - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/open_ai_usage.py b/langfuse/api/resources/ingestion/types/open_ai_usage.py index 86e7ebd82..7db130306 100644 --- a/langfuse/api/resources/ingestion/types/open_ai_usage.py +++ b/langfuse/api/resources/ingestion/types/open_ai_usage.py @@ -1,56 +1,35 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class OpenAiUsage(pydantic_v1.BaseModel): +class OpenAiUsage(UniversalBaseModel): """ Usage interface of OpenAI for improved compatibility. """ - prompt_tokens: typing.Optional[int] = pydantic_v1.Field( - alias="promptTokens", default=None - ) - completion_tokens: typing.Optional[int] = pydantic_v1.Field( - alias="completionTokens", default=None - ) - total_tokens: typing.Optional[int] = pydantic_v1.Field( - alias="totalTokens", default=None - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + prompt_tokens: typing_extensions.Annotated[ + typing.Optional[int], FieldMetadata(alias="promptTokens") + ] = None + completion_tokens: typing_extensions.Annotated[ + typing.Optional[int], FieldMetadata(alias="completionTokens") + ] = None + total_tokens: typing_extensions.Annotated[ + typing.Optional[int], FieldMetadata(alias="totalTokens") + ] = 
None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/optional_observation_body.py b/langfuse/api/resources/ingestion/types/optional_observation_body.py index 7302d30f9..3793ebd8f 100644 --- a/langfuse/api/resources/ingestion/types/optional_observation_body.py +++ b/langfuse/api/resources/ingestion/types/optional_observation_body.py @@ -3,59 +3,41 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from ...commons.types.observation_level import ObservationLevel -class OptionalObservationBody(pydantic_v1.BaseModel): - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) +class OptionalObservationBody(UniversalBaseModel): + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None name: typing.Optional[str] = None - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="startTime", default=None - ) + start_time: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="startTime") + ] = None metadata: typing.Optional[typing.Any] = None input: typing.Optional[typing.Any] = None output: typing.Optional[typing.Any] = None level: typing.Optional[ObservationLevel] = None - status_message: typing.Optional[str] = pydantic_v1.Field( - alias="statusMessage", default=None - ) - parent_observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="parentObservationId", default=None - ) + status_message: 
typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="statusMessage") + ] = None + parent_observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="parentObservationId") + ] = None version: typing.Optional[str] = None environment: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/score_body.py b/langfuse/api/resources/ingestion/types/score_body.py index 549046564..412604602 100644 --- a/langfuse/api/resources/ingestion/types/score_body.py +++ b/langfuse/api/resources/ingestion/types/score_body.py @@ -1,19 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from ...commons.types.create_score_value import CreateScoreValue from ...commons.types.score_data_type import ScoreDataType -class ScoreBody(pydantic_v1.BaseModel): +class ScoreBody(UniversalBaseModel): """ Examples -------- - from langfuse import ScoreBody + from langfuse.resources.ingestion import ScoreBody ScoreBody( name="novelty", @@ -23,71 +24,55 @@ class ScoreBody(pydantic_v1.BaseModel): """ id: typing.Optional[str] = None - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) - session_id: typing.Optional[str] = pydantic_v1.Field( - alias="sessionId", default=None - ) - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - dataset_run_id: typing.Optional[str] = pydantic_v1.Field( - alias="datasetRunId", default=None - ) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = None + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + dataset_run_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="datasetRunId") + ] = None name: str environment: typing.Optional[str] = None - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) + queue_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="queueId") + ] = pydantic.Field(default=None) """ The annotation queue referenced by the score. 
Indicates if score was initially created while processing annotation queue. """ - value: CreateScoreValue = pydantic_v1.Field() + value: CreateScoreValue = pydantic.Field() """ The value of the score. Must be passed as string for categorical scores, and numeric for boolean and numeric scores. Boolean score values must equal either 1 or 0 (true or false) """ comment: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - data_type: typing.Optional[ScoreDataType] = pydantic_v1.Field( - alias="dataType", default=None - ) + data_type: typing_extensions.Annotated[ + typing.Optional[ScoreDataType], FieldMetadata(alias="dataType") + ] = pydantic.Field(default=None) """ When set, must match the score value's type. If not set, will be inferred from the score value or config """ - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) + config_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="configId") + ] = pydantic.Field(default=None) """ Reference a score config on a score. When set, the score name must equal the config name and scores must comply with the config's range and data type. For categorical scores, the value must map to a config category. 
Numeric scores might be constrained by the score config's max and min values """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/score_event.py b/langfuse/api/resources/ingestion/types/score_event.py index ea05aedef..528413093 100644 --- a/langfuse/api/resources/ingestion/types/score_event.py +++ b/langfuse/api/resources/ingestion/types/score_event.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_event import BaseEvent from .score_body import ScoreBody @@ -12,35 +11,13 @@ class ScoreEvent(BaseEvent): body: ScoreBody - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/sdk_log_body.py b/langfuse/api/resources/ingestion/types/sdk_log_body.py index df8972860..6cf1c1668 100644 --- a/langfuse/api/resources/ingestion/types/sdk_log_body.py +++ b/langfuse/api/resources/ingestion/types/sdk_log_body.py @@ -1,42 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class SdkLogBody(pydantic_v1.BaseModel): +class SdkLogBody(UniversalBaseModel): log: typing.Any - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/sdk_log_event.py b/langfuse/api/resources/ingestion/types/sdk_log_event.py index d7ad87de8..7ecb01fff 100644 --- a/langfuse/api/resources/ingestion/types/sdk_log_event.py +++ b/langfuse/api/resources/ingestion/types/sdk_log_event.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_event import BaseEvent from .sdk_log_body import SdkLogBody @@ -12,35 +11,13 @@ class SdkLogEvent(BaseEvent): body: SdkLogBody - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/trace_body.py b/langfuse/api/resources/ingestion/types/trace_body.py index 3f5550435..3e72742c7 100644 --- a/langfuse/api/resources/ingestion/types/trace_body.py +++ b/langfuse/api/resources/ingestion/types/trace_body.py @@ -3,59 +3,41 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from 
....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class TraceBody(pydantic_v1.BaseModel): +class TraceBody(UniversalBaseModel): id: typing.Optional[str] = None timestamp: typing.Optional[dt.datetime] = None name: typing.Optional[str] = None - user_id: typing.Optional[str] = pydantic_v1.Field(alias="userId", default=None) + user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="userId") + ] = None input: typing.Optional[typing.Any] = None output: typing.Optional[typing.Any] = None - session_id: typing.Optional[str] = pydantic_v1.Field( - alias="sessionId", default=None - ) + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = None release: typing.Optional[str] = None version: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None tags: typing.Optional[typing.List[str]] = None environment: typing.Optional[str] = None - public: typing.Optional[bool] = pydantic_v1.Field(default=None) + public: typing.Optional[bool] = pydantic.Field(default=None) """ Make trace publicly accessible via url """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - 
frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/trace_event.py b/langfuse/api/resources/ingestion/types/trace_event.py index b84ddd615..5404799bb 100644 --- a/langfuse/api/resources/ingestion/types/trace_event.py +++ b/langfuse/api/resources/ingestion/types/trace_event.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_event import BaseEvent from .trace_body import TraceBody @@ -12,35 +11,13 @@ class TraceEvent(BaseEvent): body: TraceBody - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = 
{dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/update_event_body.py b/langfuse/api/resources/ingestion/types/update_event_body.py index 35bbb359b..fc56b209b 100644 --- a/langfuse/api/resources/ingestion/types/update_event_body.py +++ b/langfuse/api/resources/ingestion/types/update_event_body.py @@ -1,45 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .optional_observation_body import OptionalObservationBody class UpdateEventBody(OptionalObservationBody): id: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git 
a/langfuse/api/resources/ingestion/types/update_generation_body.py b/langfuse/api/resources/ingestion/types/update_generation_body.py index 2058543af..fadbcbccf 100644 --- a/langfuse/api/resources/ingestion/types/update_generation_body.py +++ b/langfuse/api/resources/ingestion/types/update_generation_body.py @@ -3,8 +3,10 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from ...commons.types.map_value import MapValue from .ingestion_usage import IngestionUsage from .update_span_body import UpdateSpanBody @@ -12,56 +14,35 @@ class UpdateGenerationBody(UpdateSpanBody): - completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="completionStartTime", default=None - ) + completion_start_time: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="completionStartTime") + ] = None model: typing.Optional[str] = None - model_parameters: typing.Optional[typing.Dict[str, MapValue]] = pydantic_v1.Field( - alias="modelParameters", default=None - ) + model_parameters: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, MapValue]], + FieldMetadata(alias="modelParameters"), + ] = None usage: typing.Optional[IngestionUsage] = None - prompt_name: typing.Optional[str] = pydantic_v1.Field( - alias="promptName", default=None - ) - usage_details: typing.Optional[UsageDetails] = pydantic_v1.Field( - alias="usageDetails", default=None - ) - cost_details: typing.Optional[typing.Dict[str, float]] = pydantic_v1.Field( - alias="costDetails", default=None - ) - prompt_version: typing.Optional[int] = pydantic_v1.Field( - alias="promptVersion", default=None - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": 
True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + prompt_name: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="promptName") + ] = None + usage_details: typing_extensions.Annotated[ + typing.Optional[UsageDetails], FieldMetadata(alias="usageDetails") + ] = None + cost_details: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, float]], FieldMetadata(alias="costDetails") + ] = None + prompt_version: typing_extensions.Annotated[ + typing.Optional[int], FieldMetadata(alias="promptVersion") + ] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/update_generation_event.py b/langfuse/api/resources/ingestion/types/update_generation_event.py index da8f6a9fa..3c78c6a50 100644 --- a/langfuse/api/resources/ingestion/types/update_generation_event.py +++ b/langfuse/api/resources/ingestion/types/update_generation_event.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_event import BaseEvent from .update_generation_body import UpdateGenerationBody @@ -12,35 +11,13 @@ class UpdateGenerationEvent(BaseEvent): body: UpdateGenerationBody - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/update_observation_event.py b/langfuse/api/resources/ingestion/types/update_observation_event.py index 9d7af357f..df6944dc1 100644 --- a/langfuse/api/resources/ingestion/types/update_observation_event.py +++ b/langfuse/api/resources/ingestion/types/update_observation_event.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_event import BaseEvent from .observation_body import ObservationBody @@ -12,35 +11,13 @@ class UpdateObservationEvent(BaseEvent): body: ObservationBody - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/update_span_body.py b/langfuse/api/resources/ingestion/types/update_span_body.py index e3484879b..afb230b10 100644 --- a/langfuse/api/resources/ingestion/types/update_span_body.py +++ b/langfuse/api/resources/ingestion/types/update_span_body.py @@ -3,45 +3,25 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 
+import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.serialization import FieldMetadata from .update_event_body import UpdateEventBody class UpdateSpanBody(UpdateEventBody): - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="endTime", default=None - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + end_time: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="endTime") + ] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/ingestion/types/update_span_event.py b/langfuse/api/resources/ingestion/types/update_span_event.py index ec7d83b15..2251a7b1b 100644 --- a/langfuse/api/resources/ingestion/types/update_span_event.py +++ b/langfuse/api/resources/ingestion/types/update_span_event.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_event import BaseEvent from .update_span_body import UpdateSpanBody @@ -12,35 +11,13 @@ class UpdateSpanEvent(BaseEvent): body: UpdateSpanBody - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/llm_connections/__init__.py b/langfuse/api/resources/llm_connections/__init__.py index 3cf778f1b..aba7157f1 100644 --- a/langfuse/api/resources/llm_connections/__init__.py +++ b/langfuse/api/resources/llm_connections/__init__.py @@ -1,11 +1,51 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import ( - LlmAdapter, - LlmConnection, - PaginatedLlmConnections, - UpsertLlmConnectionRequest, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + LlmAdapter, + LlmConnection, + PaginatedLlmConnections, + UpsertLlmConnectionRequest, + ) +_dynamic_imports: typing.Dict[str, str] = { + "LlmAdapter": ".types", + "LlmConnection": ".types", + "PaginatedLlmConnections": ".types", + "UpsertLlmConnectionRequest": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "LlmAdapter", diff --git a/langfuse/api/resources/llm_connections/client.py b/langfuse/api/resources/llm_connections/client.py index 4497598c5..ca7ff56fa 100644 --- a/langfuse/api/resources/llm_connections/client.py +++ b/langfuse/api/resources/llm_connections/client.py @@ -1,17 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawLlmConnectionsClient, RawLlmConnectionsClient from .types.llm_connection import LlmConnection from .types.paginated_llm_connections import PaginatedLlmConnections from .types.upsert_llm_connection_request import UpsertLlmConnectionRequest @@ -22,7 +15,18 @@ class LlmConnectionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawLlmConnectionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawLlmConnectionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawLlmConnectionsClient + """ + return self._raw_client def list( self, @@ -51,7 +55,7 @@ def list( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -63,39 +67,10 @@ def list( ) client.llm_connections.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/llm-connections", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = self._raw_client.list( + page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - PaginatedLlmConnections, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def upsert( self, @@ -119,8 +94,8 @@ def upsert( Examples -------- - from langfuse import LlmAdapter, UpsertLlmConnectionRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.llm_connections import UpsertLlmConnectionRequest client = FernLangfuse( 
x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -133,48 +108,31 @@ def upsert( client.llm_connections.upsert( request=UpsertLlmConnectionRequest( provider="provider", - adapter=LlmAdapter.ANTHROPIC, + adapter="anthropic", secret_key="secretKey", ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/llm-connections", - method="PUT", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.upsert( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LlmConnection, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncLlmConnectionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawLlmConnectionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawLlmConnectionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawLlmConnectionsClient + """ + return self._raw_client async def list( self, @@ -205,7 +163,7 @@ async def list( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -223,39 +181,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/llm-connections", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = await self._raw_client.list( + page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - PaginatedLlmConnections, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def upsert( self, @@ -281,8 +210,8 @@ async def upsert( -------- import asyncio - from langfuse import LlmAdapter, UpsertLlmConnectionRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from 
langfuse.resources.llm_connections import UpsertLlmConnectionRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -298,7 +227,7 @@ async def main() -> None: await client.llm_connections.upsert( request=UpsertLlmConnectionRequest( provider="provider", - adapter=LlmAdapter.ANTHROPIC, + adapter="anthropic", secret_key="secretKey", ), ) @@ -306,35 +235,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/llm-connections", - method="PUT", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.upsert( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LlmConnection, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/llm_connections/raw_client.py b/langfuse/api/resources/llm_connections/raw_client.py new file mode 100644 index 000000000..35481b6fa --- /dev/null +++ b/langfuse/api/resources/llm_connections/raw_client.py @@ -0,0 
+1,474 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.llm_connection import LlmConnection +from .types.paginated_llm_connections import PaginatedLlmConnections +from .types.upsert_llm_connection_request import UpsertLlmConnectionRequest + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawLlmConnectionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PaginatedLlmConnections]: + """ + Get all LLM connections in a project + + Parameters + ---------- + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[PaginatedLlmConnections] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/llm-connections", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedLlmConnections, + parse_obj_as( + type_=PaginatedLlmConnections, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def upsert( + self, + *, + request: UpsertLlmConnectionRequest, + 
request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[LlmConnection]: + """ + Create or update an LLM connection. The connection is upserted on provider. + + Parameters + ---------- + request : UpsertLlmConnectionRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LlmConnection] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/llm-connections", + method="PUT", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=UpsertLlmConnectionRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LlmConnection, + parse_obj_as( + type_=LlmConnection, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawLlmConnectionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PaginatedLlmConnections]: + """ + Get all LLM connections in a project + + Parameters + ---------- + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PaginatedLlmConnections] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/llm-connections", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedLlmConnections, + parse_obj_as( + type_=PaginatedLlmConnections, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise 
AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def upsert( + self, + *, + request: UpsertLlmConnectionRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[LlmConnection]: + """ + Create or update an LLM connection. The connection is upserted on provider. + + Parameters + ---------- + request : UpsertLlmConnectionRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[LlmConnection] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/llm-connections", + method="PUT", + json=convert_and_respect_annotation_metadata( + object_=request, + annotation=UpsertLlmConnectionRequest, + direction="write", + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LlmConnection, + parse_obj_as( + type_=LlmConnection, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + 
) diff --git a/langfuse/api/resources/llm_connections/types/__init__.py b/langfuse/api/resources/llm_connections/types/__init__.py index b490e6e27..e6ba89200 100644 --- a/langfuse/api/resources/llm_connections/types/__init__.py +++ b/langfuse/api/resources/llm_connections/types/__init__.py @@ -1,9 +1,49 @@ # This file was auto-generated by Fern from our API Definition. -from .llm_adapter import LlmAdapter -from .llm_connection import LlmConnection -from .paginated_llm_connections import PaginatedLlmConnections -from .upsert_llm_connection_request import UpsertLlmConnectionRequest +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .llm_adapter import LlmAdapter + from .llm_connection import LlmConnection + from .paginated_llm_connections import PaginatedLlmConnections + from .upsert_llm_connection_request import UpsertLlmConnectionRequest +_dynamic_imports: typing.Dict[str, str] = { + "LlmAdapter": ".llm_adapter", + "LlmConnection": ".llm_connection", + "PaginatedLlmConnections": ".paginated_llm_connections", + "UpsertLlmConnectionRequest": ".upsert_llm_connection_request", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "LlmAdapter", diff --git a/langfuse/api/resources/llm_connections/types/llm_adapter.py 
b/langfuse/api/resources/llm_connections/types/llm_adapter.py index d03513aeb..51470bf1d 100644 --- a/langfuse/api/resources/llm_connections/types/llm_adapter.py +++ b/langfuse/api/resources/llm_connections/types/llm_adapter.py @@ -1,37 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class LlmAdapter(str, enum.Enum): - ANTHROPIC = "anthropic" - OPEN_AI = "openai" - AZURE = "azure" - BEDROCK = "bedrock" - GOOGLE_VERTEX_AI = "google-vertex-ai" - GOOGLE_AI_STUDIO = "google-ai-studio" - - def visit( - self, - anthropic: typing.Callable[[], T_Result], - open_ai: typing.Callable[[], T_Result], - azure: typing.Callable[[], T_Result], - bedrock: typing.Callable[[], T_Result], - google_vertex_ai: typing.Callable[[], T_Result], - google_ai_studio: typing.Callable[[], T_Result], - ) -> T_Result: - if self is LlmAdapter.ANTHROPIC: - return anthropic() - if self is LlmAdapter.OPEN_AI: - return open_ai() - if self is LlmAdapter.AZURE: - return azure() - if self is LlmAdapter.BEDROCK: - return bedrock() - if self is LlmAdapter.GOOGLE_VERTEX_AI: - return google_vertex_ai() - if self is LlmAdapter.GOOGLE_AI_STUDIO: - return google_ai_studio() +LlmAdapter = typing.Union[ + typing.Literal[ + "anthropic", + "openai", + "azure", + "bedrock", + "google-vertex-ai", + "google-ai-studio", + ], + typing.Any, +] diff --git a/langfuse/api/resources/llm_connections/types/llm_connection.py b/langfuse/api/resources/llm_connections/types/llm_connection.py index 0b17b97a7..79508b50d 100644 --- a/langfuse/api/resources/llm_connections/types/llm_connection.py +++ b/langfuse/api/resources/llm_connections/types/llm_connection.py @@ -3,83 +3,77 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import 
IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class LlmConnection(pydantic_v1.BaseModel): +class LlmConnection(UniversalBaseModel): """ LLM API connection configuration (secrets excluded) """ id: str - provider: str = pydantic_v1.Field() + provider: str = pydantic.Field() """ Provider name (e.g., 'openai', 'my-gateway'). Must be unique in project, used for upserting. """ - adapter: str = pydantic_v1.Field() + adapter: str = pydantic.Field() """ The adapter used to interface with the LLM """ - display_secret_key: str = pydantic_v1.Field(alias="displaySecretKey") + display_secret_key: typing_extensions.Annotated[ + str, FieldMetadata(alias="displaySecretKey") + ] = pydantic.Field() """ Masked version of the secret key for display purposes """ - base_url: typing.Optional[str] = pydantic_v1.Field(alias="baseURL", default=None) + base_url: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="baseURL") + ] = pydantic.Field(default=None) """ Custom base URL for the LLM API """ - custom_models: typing.List[str] = pydantic_v1.Field(alias="customModels") + custom_models: typing_extensions.Annotated[ + typing.List[str], FieldMetadata(alias="customModels") + ] = pydantic.Field() """ List of custom model names available for this connection """ - with_default_models: bool = pydantic_v1.Field(alias="withDefaultModels") + with_default_models: typing_extensions.Annotated[ + bool, FieldMetadata(alias="withDefaultModels") + ] = pydantic.Field() """ Whether to include default models for this adapter """ - extra_header_keys: typing.List[str] = pydantic_v1.Field(alias="extraHeaderKeys") + extra_header_keys: typing_extensions.Annotated[ + typing.List[str], FieldMetadata(alias="extraHeaderKeys") + ] = pydantic.Field() """ Keys of extra headers sent with requests (values excluded for security) """ - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") + 
created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/llm_connections/types/paginated_llm_connections.py b/langfuse/api/resources/llm_connections/types/paginated_llm_connections.py index 986dbb0bb..2f9c02b48 100644 --- a/langfuse/api/resources/llm_connections/types/paginated_llm_connections.py +++ b/langfuse/api/resources/llm_connections/types/paginated_llm_connections.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...utils.resources.pagination.types.meta_response import MetaResponse from .llm_connection import LlmConnection -class PaginatedLlmConnections(pydantic_v1.BaseModel): +class PaginatedLlmConnections(UniversalBaseModel): data: typing.List[LlmConnection] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/llm_connections/types/upsert_llm_connection_request.py b/langfuse/api/resources/llm_connections/types/upsert_llm_connection_request.py index d0a5a368d..028d00a03 100644 --- a/langfuse/api/resources/llm_connections/types/upsert_llm_connection_request.py +++ b/langfuse/api/resources/llm_connections/types/upsert_llm_connection_request.py @@ -1,88 +1,71 @@ # This file was auto-generated by 
Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .llm_adapter import LlmAdapter -class UpsertLlmConnectionRequest(pydantic_v1.BaseModel): +class UpsertLlmConnectionRequest(UniversalBaseModel): """ Request to create or update an LLM connection (upsert) """ - provider: str = pydantic_v1.Field() + provider: str = pydantic.Field() """ Provider name (e.g., 'openai', 'my-gateway'). Must be unique in project, used for upserting. """ - adapter: LlmAdapter = pydantic_v1.Field() + adapter: LlmAdapter = pydantic.Field() """ The adapter used to interface with the LLM """ - secret_key: str = pydantic_v1.Field(alias="secretKey") + secret_key: typing_extensions.Annotated[str, FieldMetadata(alias="secretKey")] = ( + pydantic.Field() + ) """ Secret key for the LLM API. """ - base_url: typing.Optional[str] = pydantic_v1.Field(alias="baseURL", default=None) + base_url: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="baseURL") + ] = pydantic.Field(default=None) """ Custom base URL for the LLM API """ - custom_models: typing.Optional[typing.List[str]] = pydantic_v1.Field( - alias="customModels", default=None - ) + custom_models: typing_extensions.Annotated[ + typing.Optional[typing.List[str]], FieldMetadata(alias="customModels") + ] = pydantic.Field(default=None) """ List of custom model names """ - with_default_models: typing.Optional[bool] = pydantic_v1.Field( - alias="withDefaultModels", default=None - ) + with_default_models: typing_extensions.Annotated[ + typing.Optional[bool], FieldMetadata(alias="withDefaultModels") + ] = pydantic.Field(default=None) """ Whether to include default models. Default is true. 
""" - extra_headers: typing.Optional[typing.Dict[str, str]] = pydantic_v1.Field( - alias="extraHeaders", default=None - ) + extra_headers: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, str]], FieldMetadata(alias="extraHeaders") + ] = pydantic.Field(default=None) """ Extra headers to send with requests """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/media/__init__.py b/langfuse/api/resources/media/__init__.py index f337d7a04..85d8f7b4f 100644 --- a/langfuse/api/resources/media/__init__.py +++ b/langfuse/api/resources/media/__init__.py @@ -1,12 +1,53 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import ( - GetMediaResponse, - GetMediaUploadUrlRequest, - GetMediaUploadUrlResponse, - MediaContentType, - PatchMediaBody, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + GetMediaResponse, + GetMediaUploadUrlRequest, + GetMediaUploadUrlResponse, + MediaContentType, + PatchMediaBody, + ) +_dynamic_imports: typing.Dict[str, str] = { + "GetMediaResponse": ".types", + "GetMediaUploadUrlRequest": ".types", + "GetMediaUploadUrlResponse": ".types", + "MediaContentType": ".types", + "PatchMediaBody": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "GetMediaResponse", diff --git a/langfuse/api/resources/media/client.py b/langfuse/api/resources/media/client.py index bb8e4b149..77b4b375a 100644 --- a/langfuse/api/resources/media/client.py +++ b/langfuse/api/resources/media/client.py @@ -1,18 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawMediaClient, RawMediaClient from .types.get_media_response import GetMediaResponse from .types.get_media_upload_url_request import GetMediaUploadUrlRequest from .types.get_media_upload_url_response import GetMediaUploadUrlResponse @@ -24,7 +16,18 @@ class MediaClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawMediaClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawMediaClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawMediaClient + """ + return self._raw_client def get( self, media_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -46,7 +49,7 @@ def get( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -60,36 +63,8 @@ def get( media_id="mediaId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/media/{jsonable_encoder(media_id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetMediaResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(media_id, request_options=request_options) + return _response.data def patch( self, @@ -119,8 +94,8 @@ def patch( -------- import datetime - from langfuse import PatchMediaBody - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.media import PatchMediaBody client = FernLangfuse( 
x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -140,38 +115,10 @@ def patch( ), ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/media/{jsonable_encoder(media_id)}", - method="PATCH", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.patch( + media_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_upload_url( self, @@ -195,8 +142,8 @@ def get_upload_url( Examples -------- - from langfuse import GetMediaUploadUrlRequest, MediaContentType - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.media import GetMediaUploadUrlRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -209,52 +156,33 @@ def get_upload_url( client.media.get_upload_url( request=GetMediaUploadUrlRequest( trace_id="traceId", - content_type=MediaContentType.IMAGE_PNG, + content_type="image/png", content_length=1, sha_256_hash="sha256Hash", field="field", ), ) """ - _response = 
self._client_wrapper.httpx_client.request( - "api/public/media", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.get_upload_url( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - GetMediaUploadUrlResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncMediaClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawMediaClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawMediaClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawMediaClient + """ + return self._raw_client async def get( self, media_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -278,7 +206,7 @@ async def get( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -298,36 +226,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/media/{jsonable_encoder(media_id)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get( + media_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetMediaResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def patch( self, @@ -358,8 +260,8 @@ async def patch( import asyncio import datetime - from langfuse import PatchMediaBody - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from 
langfuse.resources.media import PatchMediaBody client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -385,38 +287,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/media/{jsonable_encoder(media_id)}", - method="PATCH", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.patch( + media_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_upload_url( self, @@ -442,8 +316,8 @@ async def get_upload_url( -------- import asyncio - from langfuse import GetMediaUploadUrlRequest, MediaContentType - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.media import GetMediaUploadUrlRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -459,7 +333,7 @@ async def main() -> None: await client.media.get_upload_url( request=GetMediaUploadUrlRequest( trace_id="traceId", - 
content_type=MediaContentType.IMAGE_PNG, + content_type="image/png", content_length=1, sha_256_hash="sha256Hash", field="field", @@ -469,37 +343,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/media", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.get_upload_url( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - GetMediaUploadUrlResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/media/raw_client.py b/langfuse/api/resources/media/raw_client.py new file mode 100644 index 000000000..3215b73f1 --- /dev/null +++ b/langfuse/api/resources/media/raw_client.py @@ -0,0 +1,658 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.get_media_response import GetMediaResponse +from .types.get_media_upload_url_request import GetMediaUploadUrlRequest +from .types.get_media_upload_url_response import GetMediaUploadUrlResponse +from .types.patch_media_body import PatchMediaBody + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawMediaClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get( + self, media_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[GetMediaResponse]: + """ + Get a media record + + Parameters + ---------- + media_id : str + The unique langfuse identifier of a media record + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[GetMediaResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/media/{jsonable_encoder(media_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GetMediaResponse, + parse_obj_as( + type_=GetMediaResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def patch( + self, + media_id: str, + *, + request: PatchMediaBody, + request_options: typing.Optional[RequestOptions] = None, 
+ ) -> HttpResponse[None]: + """ + Patch a media record + + Parameters + ---------- + media_id : str + The unique langfuse identifier of a media record + + request : PatchMediaBody + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/media/{jsonable_encoder(media_id)}", + method="PATCH", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=PatchMediaBody, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + 
) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_upload_url( + self, + *, + request: GetMediaUploadUrlRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GetMediaUploadUrlResponse]: + """ + Get a presigned upload URL for a media record + + Parameters + ---------- + request : GetMediaUploadUrlRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GetMediaUploadUrlResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/media", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=GetMediaUploadUrlRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GetMediaUploadUrlResponse, + parse_obj_as( + type_=GetMediaUploadUrlResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + 
if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawMediaClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, media_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[GetMediaResponse]: + """ + Get a media record + + Parameters + ---------- + media_id : str + The unique langfuse identifier of a media record + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[GetMediaResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/media/{jsonable_encoder(media_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GetMediaResponse, + parse_obj_as( + type_=GetMediaResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def patch( + self, + media_id: str, + *, + request: PatchMediaBody, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + Patch a media record + + Parameters + ---------- + media_id : str + The unique langfuse identifier of a media record + + request : PatchMediaBody + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/media/{jsonable_encoder(media_id)}", + method="PATCH", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=PatchMediaBody, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + 
status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_upload_url( + self, + *, + request: GetMediaUploadUrlRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GetMediaUploadUrlResponse]: + """ + Get a presigned upload URL for a media record + + Parameters + ---------- + request : GetMediaUploadUrlRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GetMediaUploadUrlResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/media", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=GetMediaUploadUrlRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GetMediaUploadUrlResponse, + parse_obj_as( + type_=GetMediaUploadUrlResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + 
body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/media/types/__init__.py b/langfuse/api/resources/media/types/__init__.py index 20af676d8..0fb9a44ed 100644 --- a/langfuse/api/resources/media/types/__init__.py +++ b/langfuse/api/resources/media/types/__init__.py @@ -1,10 +1,51 @@ # This file was auto-generated by Fern from our API Definition. -from .get_media_response import GetMediaResponse -from .get_media_upload_url_request import GetMediaUploadUrlRequest -from .get_media_upload_url_response import GetMediaUploadUrlResponse -from .media_content_type import MediaContentType -from .patch_media_body import PatchMediaBody +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .get_media_response import GetMediaResponse + from .get_media_upload_url_request import GetMediaUploadUrlRequest + from .get_media_upload_url_response import GetMediaUploadUrlResponse + from .media_content_type import MediaContentType + from .patch_media_body import PatchMediaBody +_dynamic_imports: typing.Dict[str, str] = { + "GetMediaResponse": ".get_media_response", + "GetMediaUploadUrlRequest": ".get_media_upload_url_request", + "GetMediaUploadUrlResponse": ".get_media_upload_url_response", + "MediaContentType": ".media_content_type", + "PatchMediaBody": ".patch_media_body", +} + + +def __getattr__(attr_name: str) -> 
typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "GetMediaResponse", diff --git a/langfuse/api/resources/media/types/get_media_response.py b/langfuse/api/resources/media/types/get_media_response.py index fa5368872..4b7e4cde6 100644 --- a/langfuse/api/resources/media/types/get_media_response.py +++ b/langfuse/api/resources/media/types/get_media_response.py @@ -3,70 +3,60 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class GetMediaResponse(pydantic_v1.BaseModel): - media_id: str = pydantic_v1.Field(alias="mediaId") +class GetMediaResponse(UniversalBaseModel): + media_id: typing_extensions.Annotated[str, FieldMetadata(alias="mediaId")] = ( + pydantic.Field() + ) """ The unique langfuse identifier of a media record """ - content_type: str = pydantic_v1.Field(alias="contentType") + content_type: typing_extensions.Annotated[ + str, FieldMetadata(alias="contentType") + ] = pydantic.Field() """ The MIME type of the media record """ - content_length: int = pydantic_v1.Field(alias="contentLength") + content_length: typing_extensions.Annotated[ + int, 
FieldMetadata(alias="contentLength") + ] = pydantic.Field() """ The size of the media record in bytes """ - uploaded_at: dt.datetime = pydantic_v1.Field(alias="uploadedAt") + uploaded_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="uploadedAt") + ] = pydantic.Field() """ The date and time when the media record was uploaded """ - url: str = pydantic_v1.Field() + url: str = pydantic.Field() """ The download URL of the media record """ - url_expiry: str = pydantic_v1.Field(alias="urlExpiry") + url_expiry: typing_extensions.Annotated[str, FieldMetadata(alias="urlExpiry")] = ( + pydantic.Field() + ) """ The expiry date and time of the media record download URL """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/media/types/get_media_upload_url_request.py b/langfuse/api/resources/media/types/get_media_upload_url_request.py index d0cde59fe..1533418d6 100644 --- 
a/langfuse/api/resources/media/types/get_media_upload_url_request.py +++ b/langfuse/api/resources/media/types/get_media_upload_url_request.py @@ -1,71 +1,58 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .media_content_type import MediaContentType -class GetMediaUploadUrlRequest(pydantic_v1.BaseModel): - trace_id: str = pydantic_v1.Field(alias="traceId") +class GetMediaUploadUrlRequest(UniversalBaseModel): + trace_id: typing_extensions.Annotated[str, FieldMetadata(alias="traceId")] = ( + pydantic.Field() + ) """ The trace ID associated with the media record """ - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = pydantic.Field(default=None) """ The observation ID associated with the media record. If the media record is associated directly with a trace, this will be null. 
""" - content_type: MediaContentType = pydantic_v1.Field(alias="contentType") - content_length: int = pydantic_v1.Field(alias="contentLength") + content_type: typing_extensions.Annotated[ + MediaContentType, FieldMetadata(alias="contentType") + ] + content_length: typing_extensions.Annotated[ + int, FieldMetadata(alias="contentLength") + ] = pydantic.Field() """ The size of the media record in bytes """ - sha_256_hash: str = pydantic_v1.Field(alias="sha256Hash") + sha_256_hash: typing_extensions.Annotated[ + str, FieldMetadata(alias="sha256Hash") + ] = pydantic.Field() """ The SHA-256 hash of the media record """ - field: str = pydantic_v1.Field() + field: str = pydantic.Field() """ The trace / observation field the media record is associated with. This can be one of `input`, `output`, `metadata` """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/media/types/get_media_upload_url_response.py 
b/langfuse/api/resources/media/types/get_media_upload_url_response.py index fadc76c01..3134c4cd5 100644 --- a/langfuse/api/resources/media/types/get_media_upload_url_response.py +++ b/langfuse/api/resources/media/types/get_media_upload_url_response.py @@ -1,54 +1,35 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class GetMediaUploadUrlResponse(pydantic_v1.BaseModel): - upload_url: typing.Optional[str] = pydantic_v1.Field( - alias="uploadUrl", default=None - ) +class GetMediaUploadUrlResponse(UniversalBaseModel): + upload_url: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="uploadUrl") + ] = pydantic.Field(default=None) """ The presigned upload URL. 
If the asset is already uploaded, this will be null """ - media_id: str = pydantic_v1.Field(alias="mediaId") + media_id: typing_extensions.Annotated[str, FieldMetadata(alias="mediaId")] = ( + pydantic.Field() + ) """ The unique langfuse identifier of a media record """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/media/types/media_content_type.py b/langfuse/api/resources/media/types/media_content_type.py index e8fdeefa2..34a7b159a 100644 --- a/langfuse/api/resources/media/types/media_content_type.py +++ b/langfuse/api/resources/media/types/media_content_type.py @@ -1,133 +1,38 @@ # This file was auto-generated by Fern from our API Definition. 
-import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class MediaContentType(str, enum.Enum): - """ - The MIME type of the media record - """ - - IMAGE_PNG = "image/png" - IMAGE_JPEG = "image/jpeg" - IMAGE_JPG = "image/jpg" - IMAGE_WEBP = "image/webp" - IMAGE_GIF = "image/gif" - IMAGE_SVG_XML = "image/svg+xml" - IMAGE_TIFF = "image/tiff" - IMAGE_BMP = "image/bmp" - AUDIO_MPEG = "audio/mpeg" - AUDIO_MP_3 = "audio/mp3" - AUDIO_WAV = "audio/wav" - AUDIO_OGG = "audio/ogg" - AUDIO_OGA = "audio/oga" - AUDIO_AAC = "audio/aac" - AUDIO_MP_4 = "audio/mp4" - AUDIO_FLAC = "audio/flac" - VIDEO_MP_4 = "video/mp4" - VIDEO_WEBM = "video/webm" - TEXT_PLAIN = "text/plain" - TEXT_HTML = "text/html" - TEXT_CSS = "text/css" - TEXT_CSV = "text/csv" - APPLICATION_PDF = "application/pdf" - APPLICATION_MSWORD = "application/msword" - APPLICATION_MS_EXCEL = "application/vnd.ms-excel" - APPLICATION_ZIP = "application/zip" - APPLICATION_JSON = "application/json" - APPLICATION_XML = "application/xml" - APPLICATION_OCTET_STREAM = "application/octet-stream" - - def visit( - self, - image_png: typing.Callable[[], T_Result], - image_jpeg: typing.Callable[[], T_Result], - image_jpg: typing.Callable[[], T_Result], - image_webp: typing.Callable[[], T_Result], - image_gif: typing.Callable[[], T_Result], - image_svg_xml: typing.Callable[[], T_Result], - image_tiff: typing.Callable[[], T_Result], - image_bmp: typing.Callable[[], T_Result], - audio_mpeg: typing.Callable[[], T_Result], - audio_mp_3: typing.Callable[[], T_Result], - audio_wav: typing.Callable[[], T_Result], - audio_ogg: typing.Callable[[], T_Result], - audio_oga: typing.Callable[[], T_Result], - audio_aac: typing.Callable[[], T_Result], - audio_mp_4: typing.Callable[[], T_Result], - audio_flac: typing.Callable[[], T_Result], - video_mp_4: typing.Callable[[], T_Result], - video_webm: typing.Callable[[], T_Result], - text_plain: typing.Callable[[], T_Result], - text_html: typing.Callable[[], T_Result], - text_css: 
typing.Callable[[], T_Result], - text_csv: typing.Callable[[], T_Result], - application_pdf: typing.Callable[[], T_Result], - application_msword: typing.Callable[[], T_Result], - application_ms_excel: typing.Callable[[], T_Result], - application_zip: typing.Callable[[], T_Result], - application_json: typing.Callable[[], T_Result], - application_xml: typing.Callable[[], T_Result], - application_octet_stream: typing.Callable[[], T_Result], - ) -> T_Result: - if self is MediaContentType.IMAGE_PNG: - return image_png() - if self is MediaContentType.IMAGE_JPEG: - return image_jpeg() - if self is MediaContentType.IMAGE_JPG: - return image_jpg() - if self is MediaContentType.IMAGE_WEBP: - return image_webp() - if self is MediaContentType.IMAGE_GIF: - return image_gif() - if self is MediaContentType.IMAGE_SVG_XML: - return image_svg_xml() - if self is MediaContentType.IMAGE_TIFF: - return image_tiff() - if self is MediaContentType.IMAGE_BMP: - return image_bmp() - if self is MediaContentType.AUDIO_MPEG: - return audio_mpeg() - if self is MediaContentType.AUDIO_MP_3: - return audio_mp_3() - if self is MediaContentType.AUDIO_WAV: - return audio_wav() - if self is MediaContentType.AUDIO_OGG: - return audio_ogg() - if self is MediaContentType.AUDIO_OGA: - return audio_oga() - if self is MediaContentType.AUDIO_AAC: - return audio_aac() - if self is MediaContentType.AUDIO_MP_4: - return audio_mp_4() - if self is MediaContentType.AUDIO_FLAC: - return audio_flac() - if self is MediaContentType.VIDEO_MP_4: - return video_mp_4() - if self is MediaContentType.VIDEO_WEBM: - return video_webm() - if self is MediaContentType.TEXT_PLAIN: - return text_plain() - if self is MediaContentType.TEXT_HTML: - return text_html() - if self is MediaContentType.TEXT_CSS: - return text_css() - if self is MediaContentType.TEXT_CSV: - return text_csv() - if self is MediaContentType.APPLICATION_PDF: - return application_pdf() - if self is MediaContentType.APPLICATION_MSWORD: - return 
application_msword() - if self is MediaContentType.APPLICATION_MS_EXCEL: - return application_ms_excel() - if self is MediaContentType.APPLICATION_ZIP: - return application_zip() - if self is MediaContentType.APPLICATION_JSON: - return application_json() - if self is MediaContentType.APPLICATION_XML: - return application_xml() - if self is MediaContentType.APPLICATION_OCTET_STREAM: - return application_octet_stream() +MediaContentType = typing.Union[ + typing.Literal[ + "image/png", + "image/jpeg", + "image/jpg", + "image/webp", + "image/gif", + "image/svg+xml", + "image/tiff", + "image/bmp", + "audio/mpeg", + "audio/mp3", + "audio/wav", + "audio/ogg", + "audio/oga", + "audio/aac", + "audio/mp4", + "audio/flac", + "video/mp4", + "video/webm", + "text/plain", + "text/html", + "text/css", + "text/csv", + "application/pdf", + "application/msword", + "application/vnd.ms-excel", + "application/zip", + "application/json", + "application/xml", + "application/octet-stream", + ], + typing.Any, +] diff --git a/langfuse/api/resources/media/types/patch_media_body.py b/langfuse/api/resources/media/types/patch_media_body.py index 49f0c3432..bea2d76c6 100644 --- a/langfuse/api/resources/media/types/patch_media_body.py +++ b/langfuse/api/resources/media/types/patch_media_body.py @@ -3,64 +3,48 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class PatchMediaBody(pydantic_v1.BaseModel): - uploaded_at: dt.datetime = pydantic_v1.Field(alias="uploadedAt") +class PatchMediaBody(UniversalBaseModel): + uploaded_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="uploadedAt") + ] = pydantic.Field() """ The date and time when the media record was uploaded """ - upload_http_status: 
int = pydantic_v1.Field(alias="uploadHttpStatus") + upload_http_status: typing_extensions.Annotated[ + int, FieldMetadata(alias="uploadHttpStatus") + ] = pydantic.Field() """ The HTTP status code of the upload """ - upload_http_error: typing.Optional[str] = pydantic_v1.Field( - alias="uploadHttpError", default=None - ) + upload_http_error: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="uploadHttpError") + ] = pydantic.Field(default=None) """ The HTTP error message of the upload """ - upload_time_ms: typing.Optional[int] = pydantic_v1.Field( - alias="uploadTimeMs", default=None - ) + upload_time_ms: typing_extensions.Annotated[ + typing.Optional[int], FieldMetadata(alias="uploadTimeMs") + ] = pydantic.Field(default=None) """ The time in milliseconds it took to upload the media record """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/metrics/__init__.py 
b/langfuse/api/resources/metrics/__init__.py index 90e510b5f..fb47bc976 100644 --- a/langfuse/api/resources/metrics/__init__.py +++ b/langfuse/api/resources/metrics/__init__.py @@ -1,5 +1,40 @@ # This file was auto-generated by Fern from our API Definition. -from .types import MetricsResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import MetricsResponse +_dynamic_imports: typing.Dict[str, str] = {"MetricsResponse": ".types"} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["MetricsResponse"] diff --git a/langfuse/api/resources/metrics/client.py b/langfuse/api/resources/metrics/client.py index 471f5182e..295e489a1 100644 --- a/langfuse/api/resources/metrics/client.py +++ b/langfuse/api/resources/metrics/client.py @@ -1,23 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawMetricsClient, RawMetricsClient from .types.metrics_response import MetricsResponse class MetricsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawMetricsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawMetricsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawMetricsClient + """ + return self._raw_client def metrics( self, *, query: str, request_options: typing.Optional[RequestOptions] = None @@ -81,7 +85,7 @@ def metrics( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -95,42 +99,26 @@ def metrics( query="query", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/metrics", - method="GET", - params={"query": query}, - request_options=request_options, + _response = self._raw_client.metrics( + query=query, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetricsResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncMetricsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawMetricsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawMetricsClient: + """ + Retrieves a raw 
implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawMetricsClient + """ + return self._raw_client async def metrics( self, *, query: str, request_options: typing.Optional[RequestOptions] = None @@ -196,7 +184,7 @@ async def metrics( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -216,34 +204,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/metrics", - method="GET", - params={"query": query}, - request_options=request_options, + _response = await self._raw_client.metrics( + query=query, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetricsResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/metrics/raw_client.py b/langfuse/api/resources/metrics/raw_client.py new file mode 100644 index 000000000..99b985f93 --- /dev/null +++ 
b/langfuse/api/resources/metrics/raw_client.py @@ -0,0 +1,318 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.metrics_response import MetricsResponse + + +class RawMetricsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def metrics( + self, *, query: str, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[MetricsResponse]: + """ + Get metrics from the Langfuse project using a query object. + + For more details, see the [Metrics API documentation](https://langfuse.com/docs/metrics/features/metrics-api). + + Parameters + ---------- + query : str + JSON string containing the query parameters with the following structure: + ```json + { + "view": string, // Required. One of "traces", "observations", "scores-numeric", "scores-categorical" + "dimensions": [ // Optional. Default: [] + { + "field": string // Field to group by, e.g. "name", "userId", "sessionId" + } + ], + "metrics": [ // Required. At least one metric must be provided + { + "measure": string, // What to measure, e.g. "count", "latency", "value" + "aggregation": string // How to aggregate, e.g. "count", "sum", "avg", "p95", "histogram" + } + ], + "filters": [ // Optional. 
Default: [] + { + "column": string, // Column to filter on + "operator": string, // Operator, e.g. "=", ">", "<", "contains" + "value": any, // Value to compare against + "type": string, // Data type, e.g. "string", "number", "stringObject" + "key": string // Required only when filtering on metadata + } + ], + "timeDimension": { // Optional. Default: null. If provided, results will be grouped by time + "granularity": string // One of "minute", "hour", "day", "week", "month", "auto" + }, + "fromTimestamp": string, // Required. ISO datetime string for start of time range + "toTimestamp": string, // Required. ISO datetime string for end of time range + "orderBy": [ // Optional. Default: null + { + "field": string, // Field to order by + "direction": string // "asc" or "desc" + } + ], + "config": { // Optional. Query-specific configuration + "bins": number, // Optional. Number of bins for histogram (1-100), default: 10 + "row_limit": number // Optional. Row limit for results (1-1000) + } + } + ``` + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[MetricsResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/metrics", + method="GET", + params={ + "query": query, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MetricsResponse, + parse_obj_as( + type_=MetricsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawMetricsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = 
client_wrapper + + async def metrics( + self, *, query: str, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[MetricsResponse]: + """ + Get metrics from the Langfuse project using a query object. + + For more details, see the [Metrics API documentation](https://langfuse.com/docs/metrics/features/metrics-api). + + Parameters + ---------- + query : str + JSON string containing the query parameters with the following structure: + ```json + { + "view": string, // Required. One of "traces", "observations", "scores-numeric", "scores-categorical" + "dimensions": [ // Optional. Default: [] + { + "field": string // Field to group by, e.g. "name", "userId", "sessionId" + } + ], + "metrics": [ // Required. At least one metric must be provided + { + "measure": string, // What to measure, e.g. "count", "latency", "value" + "aggregation": string // How to aggregate, e.g. "count", "sum", "avg", "p95", "histogram" + } + ], + "filters": [ // Optional. Default: [] + { + "column": string, // Column to filter on + "operator": string, // Operator, e.g. "=", ">", "<", "contains" + "value": any, // Value to compare against + "type": string, // Data type, e.g. "string", "number", "stringObject" + "key": string // Required only when filtering on metadata + } + ], + "timeDimension": { // Optional. Default: null. If provided, results will be grouped by time + "granularity": string // One of "minute", "hour", "day", "week", "month", "auto" + }, + "fromTimestamp": string, // Required. ISO datetime string for start of time range + "toTimestamp": string, // Required. ISO datetime string for end of time range + "orderBy": [ // Optional. Default: null + { + "field": string, // Field to order by + "direction": string // "asc" or "desc" + } + ], + "config": { // Optional. Query-specific configuration + "bins": number, // Optional. Number of bins for histogram (1-100), default: 10 + "row_limit": number // Optional. 
Row limit for results (1-1000) + } + } + ``` + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[MetricsResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/metrics", + method="GET", + params={ + "query": query, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MetricsResponse, + parse_obj_as( + type_=MetricsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + 
body=_response_json, + ) diff --git a/langfuse/api/resources/metrics/types/__init__.py b/langfuse/api/resources/metrics/types/__init__.py index 7bf03027d..308847504 100644 --- a/langfuse/api/resources/metrics/types/__init__.py +++ b/langfuse/api/resources/metrics/types/__init__.py @@ -1,5 +1,40 @@ # This file was auto-generated by Fern from our API Definition. -from .metrics_response import MetricsResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .metrics_response import MetricsResponse +_dynamic_imports: typing.Dict[str, str] = {"MetricsResponse": ".metrics_response"} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["MetricsResponse"] diff --git a/langfuse/api/resources/metrics/types/metrics_response.py b/langfuse/api/resources/metrics/types/metrics_response.py index af0121c84..2fde9153b 100644 --- a/langfuse/api/resources/metrics/types/metrics_response.py +++ b/langfuse/api/resources/metrics/types/metrics_response.py @@ -1,47 +1,26 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class MetricsResponse(pydantic_v1.BaseModel): - data: typing.List[typing.Dict[str, typing.Any]] = pydantic_v1.Field() +class MetricsResponse(UniversalBaseModel): + data: typing.List[typing.Dict[str, typing.Any]] = pydantic.Field() """ The metrics data. Each item in the list contains the metric values and dimensions requested in the query. Format varies based on the query parameters. Histograms will return an array with [lower, upper, height] tuples. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/models/__init__.py b/langfuse/api/resources/models/__init__.py index a41fff3e5..7ebdb7762 100644 --- a/langfuse/api/resources/models/__init__.py +++ b/langfuse/api/resources/models/__init__.py @@ -1,5 +1,43 
@@ # This file was auto-generated by Fern from our API Definition. -from .types import CreateModelRequest, PaginatedModels +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import CreateModelRequest, PaginatedModels +_dynamic_imports: typing.Dict[str, str] = { + "CreateModelRequest": ".types", + "PaginatedModels": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["CreateModelRequest", "PaginatedModels"] diff --git a/langfuse/api/resources/models/client.py b/langfuse/api/resources/models/client.py index 4f4b727fa..08858f7e6 100644 --- a/langfuse/api/resources/models/client.py +++ b/langfuse/api/resources/models/client.py @@ -1,19 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..commons.types.model import Model +from .raw_client import AsyncRawModelsClient, RawModelsClient from .types.create_model_request import CreateModelRequest from .types.paginated_models import PaginatedModels @@ -23,7 +15,18 @@ class ModelsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawModelsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawModelsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawModelsClient + """ + return self._raw_client def create( self, @@ -47,8 +50,8 @@ def create( Examples -------- - from langfuse import CreateModelRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.models import CreateModelRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -65,38 +68,10 @@ def create( ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/models", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Model, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def list( self, @@ -125,7 +100,7 @@ def list( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -137,37 +112,10 @@ def list( ) client.models.list() """ - _response = 
self._client_wrapper.httpx_client.request( - "api/public/models", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = self._raw_client.list( + page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedModels, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -188,7 +136,7 @@ def get( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -202,36 +150,8 @@ def get( id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/models/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Model, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, 
_response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -252,7 +172,7 @@ def delete( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -266,41 +186,24 @@ def delete( id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/models/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if 
_response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data class AsyncModelsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawModelsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawModelsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawModelsClient + """ + return self._raw_client async def create( self, @@ -326,8 +229,8 @@ async def create( -------- import asyncio - from langfuse import CreateModelRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.models import CreateModelRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -350,38 +253,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/models", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Model, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def list( self, @@ -412,7 +287,7 @@ async def list( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -430,37 +305,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/models", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = await self._raw_client.list( + page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedModels, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - 
_response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -483,7 +331,7 @@ async def get( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -503,36 +351,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/models/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Model, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -555,7 +375,7 @@ 
async def delete( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -575,33 +395,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/models/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data diff --git a/langfuse/api/resources/models/raw_client.py b/langfuse/api/resources/models/raw_client.py new file mode 100644 index 000000000..7f6e59762 --- /dev/null +++ b/langfuse/api/resources/models/raw_client.py @@ -0,0 +1,857 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..commons.types.model import Model +from .types.create_model_request import CreateModelRequest +from .types.paginated_models import PaginatedModels + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawModelsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def create( + self, + *, + request: CreateModelRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Model]: + """ + Create a model + + Parameters + ---------- + request : CreateModelRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Model] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/models", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateModelRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Model, + parse_obj_as( + type_=Model, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def list( + self, + *, + page: typing.Optional[int] = 
None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PaginatedModels]: + """ + Get all models + + Parameters + ---------- + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PaginatedModels] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/models", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedModels, + parse_obj_as( + type_=PaginatedModels, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + 
_response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[Model]: + """ + Get a model + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Model] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/models/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Model, + parse_obj_as( + type_=Model, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, 
+ parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + Delete a model. Cannot delete models managed by Langfuse. You can create your own definition with the same modelName to override the definition though. + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/models/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if 
_response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawModelsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def create( + self, + *, + request: CreateModelRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Model]: + """ + Create a model + + Parameters + ---------- + request : CreateModelRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Model] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/models", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateModelRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Model, + parse_obj_as( + type_=Model, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + 
raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def list( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PaginatedModels]: + """ + Get all models + + Parameters + ---------- + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[PaginatedModels] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/models", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedModels, + parse_obj_as( + type_=PaginatedModels, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + 
) -> AsyncHttpResponse[Model]: + """ + Get a model + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Model] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/models/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Model, + parse_obj_as( + type_=Model, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + 
body=_response_json, + ) + + async def delete( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a model. Cannot delete models managed by Langfuse. You can create your own definition with the same modelName to override the definition though. + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/models/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/models/types/__init__.py b/langfuse/api/resources/models/types/__init__.py index 94285af35..8b4b651c5 100644 --- a/langfuse/api/resources/models/types/__init__.py +++ b/langfuse/api/resources/models/types/__init__.py @@ -1,6 +1,44 @@ # This file was auto-generated by Fern from our API Definition. -from .create_model_request import CreateModelRequest -from .paginated_models import PaginatedModels +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .create_model_request import CreateModelRequest + from .paginated_models import PaginatedModels +_dynamic_imports: typing.Dict[str, str] = { + "CreateModelRequest": ".create_model_request", + "PaginatedModels": ".paginated_models", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["CreateModelRequest", "PaginatedModels"] diff --git a/langfuse/api/resources/models/types/create_model_request.py b/langfuse/api/resources/models/types/create_model_request.py index b3d8a6462..da4e6d4dd 100644 --- a/langfuse/api/resources/models/types/create_model_request.py +++ 
b/langfuse/api/resources/models/types/create_model_request.py @@ -3,98 +3,82 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from ...commons.types.model_usage_unit import ModelUsageUnit -class CreateModelRequest(pydantic_v1.BaseModel): - model_name: str = pydantic_v1.Field(alias="modelName") +class CreateModelRequest(UniversalBaseModel): + model_name: typing_extensions.Annotated[str, FieldMetadata(alias="modelName")] = ( + pydantic.Field() + ) """ Name of the model definition. If multiple with the same name exist, they are applied in the following order: (1) custom over built-in, (2) newest according to startTime where model.startTime str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff 
--git a/langfuse/api/resources/models/types/paginated_models.py b/langfuse/api/resources/models/types/paginated_models.py index 3469a1fe6..d71dc6217 100644 --- a/langfuse/api/resources/models/types/paginated_models.py +++ b/langfuse/api/resources/models/types/paginated_models.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.model import Model from ...utils.resources.pagination.types.meta_response import MetaResponse -class PaginatedModels(pydantic_v1.BaseModel): +class PaginatedModels(UniversalBaseModel): data: typing.List[Model] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/observations/__init__.py 
b/langfuse/api/resources/observations/__init__.py index 95fd7c721..22b445984 100644 --- a/langfuse/api/resources/observations/__init__.py +++ b/langfuse/api/resources/observations/__init__.py @@ -1,5 +1,43 @@ # This file was auto-generated by Fern from our API Definition. -from .types import Observations, ObservationsViews +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import Observations, ObservationsViews +_dynamic_imports: typing.Dict[str, str] = { + "Observations": ".types", + "ObservationsViews": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["Observations", "ObservationsViews"] diff --git a/langfuse/api/resources/observations/client.py b/langfuse/api/resources/observations/client.py index 83cc3274b..733104456 100644 --- a/langfuse/api/resources/observations/client.py +++ b/langfuse/api/resources/observations/client.py @@ -2,27 +2,29 @@ import datetime as dt import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.datetime_utils import serialize_datetime -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options 
import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..commons.types.observation_level import ObservationLevel from ..commons.types.observations_view import ObservationsView +from .raw_client import AsyncRawObservationsClient, RawObservationsClient from .types.observations_views import ObservationsViews class ObservationsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawObservationsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawObservationsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawObservationsClient + """ + return self._raw_client def get( self, @@ -47,7 +49,7 @@ def get( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -61,36 +63,10 @@ def get( observation_id="observationId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/observations/{jsonable_encoder(observation_id)}", - method="GET", - request_options=request_options, + _response = self._raw_client.get( + observation_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ObservationsView, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise 
AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_many( self, @@ -254,7 +230,7 @@ def get_many( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -266,60 +242,39 @@ def get_many( ) client.observations.get_many() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/observations", - method="GET", - params={ - "page": page, - "limit": limit, - "name": name, - "userId": user_id, - "type": type, - "traceId": trace_id, - "level": level, - "parentObservationId": parent_observation_id, - "environment": environment, - "fromStartTime": serialize_datetime(from_start_time) - if from_start_time is not None - else None, - "toStartTime": serialize_datetime(to_start_time) - if to_start_time is not None - else None, - "version": version, - "filter": filter, - }, + _response = self._raw_client.get_many( + page=page, + limit=limit, + name=name, + user_id=user_id, + type=type, + trace_id=trace_id, + level=level, + parent_observation_id=parent_observation_id, + environment=environment, + from_start_time=from_start_time, + to_start_time=to_start_time, + version=version, + filter=filter, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ObservationsViews, _response.json()) # type: ignore - if _response.status_code == 400: - raise 
Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncObservationsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawObservationsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawObservationsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawObservationsClient + """ + return self._raw_client async def get( self, @@ -346,7 +301,7 @@ async def get( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -366,36 +321,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/observations/{jsonable_encoder(observation_id)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get( + observation_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ObservationsView, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_many( self, @@ -561,7 +490,7 @@ async def get_many( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -579,52 +508,20 @@ async def 
main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/observations", - method="GET", - params={ - "page": page, - "limit": limit, - "name": name, - "userId": user_id, - "type": type, - "traceId": trace_id, - "level": level, - "parentObservationId": parent_observation_id, - "environment": environment, - "fromStartTime": serialize_datetime(from_start_time) - if from_start_time is not None - else None, - "toStartTime": serialize_datetime(to_start_time) - if to_start_time is not None - else None, - "version": version, - "filter": filter, - }, + _response = await self._raw_client.get_many( + page=page, + limit=limit, + name=name, + user_id=user_id, + type=type, + trace_id=trace_id, + level=level, + parent_observation_id=parent_observation_id, + environment=environment, + from_start_time=from_start_time, + to_start_time=to_start_time, + version=version, + filter=filter, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ObservationsViews, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return 
_response.data diff --git a/langfuse/api/resources/observations/raw_client.py b/langfuse/api/resources/observations/raw_client.py new file mode 100644 index 000000000..a712bb38c --- /dev/null +++ b/langfuse/api/resources/observations/raw_client.py @@ -0,0 +1,763 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.datetime_utils import serialize_datetime +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..commons.types.observation_level import ObservationLevel +from ..commons.types.observations_view import ObservationsView +from .types.observations_views import ObservationsViews + + +class RawObservationsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get( + self, + observation_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ObservationsView]: + """ + Get a observation + + Parameters + ---------- + observation_id : str + The unique langfuse identifier of an observation, can be an event, span or generation + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ObservationsView] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/observations/{jsonable_encoder(observation_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ObservationsView, + parse_obj_as( + type_=ObservationsView, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_many( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + name: 
typing.Optional[str] = None, + user_id: typing.Optional[str] = None, + type: typing.Optional[str] = None, + trace_id: typing.Optional[str] = None, + level: typing.Optional[ObservationLevel] = None, + parent_observation_id: typing.Optional[str] = None, + environment: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + from_start_time: typing.Optional[dt.datetime] = None, + to_start_time: typing.Optional[dt.datetime] = None, + version: typing.Optional[str] = None, + filter: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ObservationsViews]: + """ + Get a list of observations + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1. + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. + + name : typing.Optional[str] + + user_id : typing.Optional[str] + + type : typing.Optional[str] + + trace_id : typing.Optional[str] + + level : typing.Optional[ObservationLevel] + Optional filter for observations with a specific level (e.g. "DEBUG", "DEFAULT", "WARNING", "ERROR"). + + parent_observation_id : typing.Optional[str] + + environment : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Optional filter for observations where the environment is one of the provided values. + + from_start_time : typing.Optional[dt.datetime] + Retrieve only observations with a start_time on or after this datetime (ISO 8601). + + to_start_time : typing.Optional[dt.datetime] + Retrieve only observations with a start_time before this datetime (ISO 8601). + + version : typing.Optional[str] + Optional filter to only include observations with a certain version. + + filter : typing.Optional[str] + JSON string containing an array of filter conditions. When provided, this takes precedence over query parameter filters (userId, name, type, level, environment, fromStartTime, ...). 
+ + ## Filter Structure + Each filter condition has the following structure: + ```json + [ + { + "type": string, // Required. One of: "datetime", "string", "number", "stringOptions", "categoryOptions", "arrayOptions", "stringObject", "numberObject", "boolean", "null" + "column": string, // Required. Column to filter on (see available columns below) + "operator": string, // Required. Operator based on type: + // - datetime: ">", "<", ">=", "<=" + // - string: "=", "contains", "does not contain", "starts with", "ends with" + // - stringOptions: "any of", "none of" + // - categoryOptions: "any of", "none of" + // - arrayOptions: "any of", "none of", "all of" + // - number: "=", ">", "<", ">=", "<=" + // - stringObject: "=", "contains", "does not contain", "starts with", "ends with" + // - numberObject: "=", ">", "<", ">=", "<=" + // - boolean: "=", "<>" + // - null: "is null", "is not null" + "value": any, // Required (except for null type). Value to compare against. Type depends on filter type + "key": string // Required only for stringObject, numberObject, and categoryOptions types when filtering on nested fields like metadata + } + ] + ``` + + ## Available Columns + + ### Core Observation Fields + - `id` (string) - Observation ID + - `type` (string) - Observation type (SPAN, GENERATION, EVENT) + - `name` (string) - Observation name + - `traceId` (string) - Associated trace ID + - `startTime` (datetime) - Observation start time + - `endTime` (datetime) - Observation end time + - `environment` (string) - Environment tag + - `level` (string) - Log level (DEBUG, DEFAULT, WARNING, ERROR) + - `statusMessage` (string) - Status message + - `version` (string) - Version tag + + ### Performance Metrics + - `latency` (number) - Latency in seconds (calculated: end_time - start_time) + - `timeToFirstToken` (number) - Time to first token in seconds + - `tokensPerSecond` (number) - Output tokens per second + + ### Token Usage + - `inputTokens` (number) - Number of input tokens + - 
`outputTokens` (number) - Number of output tokens + - `totalTokens` (number) - Total tokens (alias: `tokens`) + + ### Cost Metrics + - `inputCost` (number) - Input cost in USD + - `outputCost` (number) - Output cost in USD + - `totalCost` (number) - Total cost in USD + + ### Model Information + - `model` (string) - Provided model name + - `promptName` (string) - Associated prompt name + - `promptVersion` (number) - Associated prompt version + + ### Structured Data + - `metadata` (stringObject/numberObject/categoryOptions) - Metadata key-value pairs. Use `key` parameter to filter on specific metadata keys. + + ### Scores (requires join with scores table) + - `scores_avg` (number) - Average of numeric scores (alias: `scores`) + - `score_categories` (categoryOptions) - Categorical score values + + ### Associated Trace Fields (requires join with traces table) + - `userId` (string) - User ID from associated trace + - `traceName` (string) - Name from associated trace + - `traceEnvironment` (string) - Environment from associated trace + - `traceTags` (arrayOptions) - Tags from associated trace + + ## Filter Examples + ```json + [ + { + "type": "string", + "column": "type", + "operator": "=", + "value": "GENERATION" + }, + { + "type": "number", + "column": "latency", + "operator": ">=", + "value": 2.5 + }, + { + "type": "stringObject", + "column": "metadata", + "key": "environment", + "operator": "=", + "value": "production" + } + ] + ``` + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ObservationsViews] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/observations", + method="GET", + params={ + "page": page, + "limit": limit, + "name": name, + "userId": user_id, + "type": type, + "traceId": trace_id, + "level": level, + "parentObservationId": parent_observation_id, + "environment": environment, + "fromStartTime": serialize_datetime(from_start_time) + if from_start_time is not None + else None, + "toStartTime": serialize_datetime(to_start_time) + if to_start_time is not None + else None, + "version": version, + "filter": filter, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ObservationsViews, + parse_obj_as( + type_=ObservationsViews, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + 
), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawObservationsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, + observation_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ObservationsView]: + """ + Get a observation + + Parameters + ---------- + observation_id : str + The unique langfuse identifier of an observation, can be an event, span or generation + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ObservationsView] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/observations/{jsonable_encoder(observation_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ObservationsView, + parse_obj_as( + type_=ObservationsView, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + 
), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_many( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + name: typing.Optional[str] = None, + user_id: typing.Optional[str] = None, + type: typing.Optional[str] = None, + trace_id: typing.Optional[str] = None, + level: typing.Optional[ObservationLevel] = None, + parent_observation_id: typing.Optional[str] = None, + environment: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + from_start_time: typing.Optional[dt.datetime] = None, + to_start_time: typing.Optional[dt.datetime] = None, + version: typing.Optional[str] = None, + filter: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ObservationsViews]: + """ + Get a list of observations + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1. + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. 
+ + name : typing.Optional[str] + + user_id : typing.Optional[str] + + type : typing.Optional[str] + + trace_id : typing.Optional[str] + + level : typing.Optional[ObservationLevel] + Optional filter for observations with a specific level (e.g. "DEBUG", "DEFAULT", "WARNING", "ERROR"). + + parent_observation_id : typing.Optional[str] + + environment : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Optional filter for observations where the environment is one of the provided values. + + from_start_time : typing.Optional[dt.datetime] + Retrieve only observations with a start_time on or after this datetime (ISO 8601). + + to_start_time : typing.Optional[dt.datetime] + Retrieve only observations with a start_time before this datetime (ISO 8601). + + version : typing.Optional[str] + Optional filter to only include observations with a certain version. + + filter : typing.Optional[str] + JSON string containing an array of filter conditions. When provided, this takes precedence over query parameter filters (userId, name, type, level, environment, fromStartTime, ...). + + ## Filter Structure + Each filter condition has the following structure: + ```json + [ + { + "type": string, // Required. One of: "datetime", "string", "number", "stringOptions", "categoryOptions", "arrayOptions", "stringObject", "numberObject", "boolean", "null" + "column": string, // Required. Column to filter on (see available columns below) + "operator": string, // Required. 
Operator based on type: + // - datetime: ">", "<", ">=", "<=" + // - string: "=", "contains", "does not contain", "starts with", "ends with" + // - stringOptions: "any of", "none of" + // - categoryOptions: "any of", "none of" + // - arrayOptions: "any of", "none of", "all of" + // - number: "=", ">", "<", ">=", "<=" + // - stringObject: "=", "contains", "does not contain", "starts with", "ends with" + // - numberObject: "=", ">", "<", ">=", "<=" + // - boolean: "=", "<>" + // - null: "is null", "is not null" + "value": any, // Required (except for null type). Value to compare against. Type depends on filter type + "key": string // Required only for stringObject, numberObject, and categoryOptions types when filtering on nested fields like metadata + } + ] + ``` + + ## Available Columns + + ### Core Observation Fields + - `id` (string) - Observation ID + - `type` (string) - Observation type (SPAN, GENERATION, EVENT) + - `name` (string) - Observation name + - `traceId` (string) - Associated trace ID + - `startTime` (datetime) - Observation start time + - `endTime` (datetime) - Observation end time + - `environment` (string) - Environment tag + - `level` (string) - Log level (DEBUG, DEFAULT, WARNING, ERROR) + - `statusMessage` (string) - Status message + - `version` (string) - Version tag + + ### Performance Metrics + - `latency` (number) - Latency in seconds (calculated: end_time - start_time) + - `timeToFirstToken` (number) - Time to first token in seconds + - `tokensPerSecond` (number) - Output tokens per second + + ### Token Usage + - `inputTokens` (number) - Number of input tokens + - `outputTokens` (number) - Number of output tokens + - `totalTokens` (number) - Total tokens (alias: `tokens`) + + ### Cost Metrics + - `inputCost` (number) - Input cost in USD + - `outputCost` (number) - Output cost in USD + - `totalCost` (number) - Total cost in USD + + ### Model Information + - `model` (string) - Provided model name + - `promptName` (string) - Associated prompt 
name + - `promptVersion` (number) - Associated prompt version + + ### Structured Data + - `metadata` (stringObject/numberObject/categoryOptions) - Metadata key-value pairs. Use `key` parameter to filter on specific metadata keys. + + ### Scores (requires join with scores table) + - `scores_avg` (number) - Average of numeric scores (alias: `scores`) + - `score_categories` (categoryOptions) - Categorical score values + + ### Associated Trace Fields (requires join with traces table) + - `userId` (string) - User ID from associated trace + - `traceName` (string) - Name from associated trace + - `traceEnvironment` (string) - Environment from associated trace + - `traceTags` (arrayOptions) - Tags from associated trace + + ## Filter Examples + ```json + [ + { + "type": "string", + "column": "type", + "operator": "=", + "value": "GENERATION" + }, + { + "type": "number", + "column": "latency", + "operator": ">=", + "value": 2.5 + }, + { + "type": "stringObject", + "column": "metadata", + "key": "environment", + "operator": "=", + "value": "production" + } + ] + ``` + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ObservationsViews] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/observations", + method="GET", + params={ + "page": page, + "limit": limit, + "name": name, + "userId": user_id, + "type": type, + "traceId": trace_id, + "level": level, + "parentObservationId": parent_observation_id, + "environment": environment, + "fromStartTime": serialize_datetime(from_start_time) + if from_start_time is not None + else None, + "toStartTime": serialize_datetime(to_start_time) + if to_start_time is not None + else None, + "version": version, + "filter": filter, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ObservationsViews, + parse_obj_as( + type_=ObservationsViews, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/observations/types/__init__.py b/langfuse/api/resources/observations/types/__init__.py index 60f9d4e01..247b674a1 100644 --- a/langfuse/api/resources/observations/types/__init__.py +++ b/langfuse/api/resources/observations/types/__init__.py @@ -1,6 +1,44 @@ # This file was auto-generated by Fern from our API Definition. -from .observations import Observations -from .observations_views import ObservationsViews +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .observations import Observations + from .observations_views import ObservationsViews +_dynamic_imports: typing.Dict[str, str] = { + "Observations": ".observations", + "ObservationsViews": ".observations_views", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["Observations", "ObservationsViews"] diff --git a/langfuse/api/resources/observations/types/observations.py 
b/langfuse/api/resources/observations/types/observations.py index 1534dc87e..689495bc6 100644 --- a/langfuse/api/resources/observations/types/observations.py +++ b/langfuse/api/resources/observations/types/observations.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.observation import Observation from ...utils.resources.pagination.types.meta_response import MetaResponse -class Observations(pydantic_v1.BaseModel): +class Observations(UniversalBaseModel): data: typing.List[Observation] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/observations/types/observations_views.py 
b/langfuse/api/resources/observations/types/observations_views.py index ed86b7d1e..65e5774a5 100644 --- a/langfuse/api/resources/observations/types/observations_views.py +++ b/langfuse/api/resources/observations/types/observations_views.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.observations_view import ObservationsView from ...utils.resources.pagination.types.meta_response import MetaResponse -class ObservationsViews(pydantic_v1.BaseModel): +class ObservationsViews(UniversalBaseModel): data: typing.List[ObservationsView] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/opentelemetry/__init__.py 
b/langfuse/api/resources/opentelemetry/__init__.py index bada2052f..30caa3796 100644 --- a/langfuse/api/resources/opentelemetry/__init__.py +++ b/langfuse/api/resources/opentelemetry/__init__.py @@ -1,15 +1,59 @@ # This file was auto-generated by Fern from our API Definition. -from .types import ( - OtelAttribute, - OtelAttributeValue, - OtelResource, - OtelResourceSpan, - OtelScope, - OtelScopeSpan, - OtelSpan, - OtelTraceResponse, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + OtelAttribute, + OtelAttributeValue, + OtelResource, + OtelResourceSpan, + OtelScope, + OtelScopeSpan, + OtelSpan, + OtelTraceResponse, + ) +_dynamic_imports: typing.Dict[str, str] = { + "OtelAttribute": ".types", + "OtelAttributeValue": ".types", + "OtelResource": ".types", + "OtelResourceSpan": ".types", + "OtelScope": ".types", + "OtelScopeSpan": ".types", + "OtelSpan": ".types", + "OtelTraceResponse": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "OtelAttribute", diff --git a/langfuse/api/resources/opentelemetry/client.py b/langfuse/api/resources/opentelemetry/client.py index de17949d4..52fea702e 100644 --- a/langfuse/api/resources/opentelemetry/client.py +++ b/langfuse/api/resources/opentelemetry/client.py @@ -1,17 +1,10 
@@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawOpentelemetryClient, RawOpentelemetryClient from .types.otel_resource_span import OtelResourceSpan from .types.otel_trace_response import OtelTraceResponse @@ -21,7 +14,18 @@ class OpentelemetryClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawOpentelemetryClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawOpentelemetryClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawOpentelemetryClient + """ + return self._raw_client def export_traces( self, @@ -61,7 +65,8 @@ def export_traces( Examples -------- - from langfuse import ( + from langfuse import FernLangfuse + from langfuse.resources.opentelemetry import ( OtelAttribute, OtelAttributeValue, OtelResource, @@ -70,7 +75,6 @@ def export_traces( OtelScopeSpan, OtelSpan, ) - from langfuse.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -130,43 +134,26 @@ def export_traces( ], ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/otel/v1/traces", - method="POST", - json={"resourceSpans": resource_spans}, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.export_traces( + resource_spans=resource_spans, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(OtelTraceResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncOpentelemetryClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - 
self._client_wrapper = client_wrapper + self._raw_client = AsyncRawOpentelemetryClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawOpentelemetryClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawOpentelemetryClient + """ + return self._raw_client async def export_traces( self, @@ -208,7 +195,8 @@ async def export_traces( -------- import asyncio - from langfuse import ( + from langfuse import AsyncFernLangfuse + from langfuse.resources.opentelemetry import ( OtelAttribute, OtelAttributeValue, OtelResource, @@ -217,7 +205,6 @@ async def export_traces( OtelScopeSpan, OtelSpan, ) - from langfuse.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -283,35 +270,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/otel/v1/traces", - method="POST", - json={"resourceSpans": resource_spans}, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.export_traces( + resource_spans=resource_spans, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(OtelTraceResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - 
) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/opentelemetry/raw_client.py b/langfuse/api/resources/opentelemetry/raw_client.py new file mode 100644 index 000000000..0abcc10c9 --- /dev/null +++ b/langfuse/api/resources/opentelemetry/raw_client.py @@ -0,0 +1,291 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.otel_resource_span import OtelResourceSpan +from .types.otel_trace_response import OtelTraceResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawOpentelemetryClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def export_traces( + self, + *, + resource_spans: typing.Sequence[OtelResourceSpan], + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[OtelTraceResponse]: + """ + **OpenTelemetry Traces Ingestion Endpoint** + + This endpoint implements the OTLP/HTTP specification for trace ingestion, providing native OpenTelemetry integration for Langfuse Observability. + + **Supported Formats:** + - Binary Protobuf: `Content-Type: application/x-protobuf` + - JSON Protobuf: `Content-Type: application/json` + - Supports gzip compression via `Content-Encoding: gzip` header + + **Specification Compliance:** + - Conforms to [OTLP/HTTP Trace Export](https://opentelemetry.io/docs/specs/otlp/#otlphttp) + - Implements `ExportTraceServiceRequest` message format + + **Documentation:** + - Integration guide: https://langfuse.com/integrations/native/opentelemetry + - Data model: https://langfuse.com/docs/observability/data-model + + Parameters + ---------- + resource_spans : typing.Sequence[OtelResourceSpan] + Array of resource spans containing trace data as defined in the OTLP specification + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[OtelTraceResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/otel/v1/traces", + method="POST", + json={ + "resourceSpans": convert_and_respect_annotation_metadata( + object_=resource_spans, + annotation=typing.Sequence[OtelResourceSpan], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + OtelTraceResponse, + parse_obj_as( + type_=OtelTraceResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + 
status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawOpentelemetryClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def export_traces( + self, + *, + resource_spans: typing.Sequence[OtelResourceSpan], + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[OtelTraceResponse]: + """ + **OpenTelemetry Traces Ingestion Endpoint** + + This endpoint implements the OTLP/HTTP specification for trace ingestion, providing native OpenTelemetry integration for Langfuse Observability. + + **Supported Formats:** + - Binary Protobuf: `Content-Type: application/x-protobuf` + - JSON Protobuf: `Content-Type: application/json` + - Supports gzip compression via `Content-Encoding: gzip` header + + **Specification Compliance:** + - Conforms to [OTLP/HTTP Trace Export](https://opentelemetry.io/docs/specs/otlp/#otlphttp) + - Implements `ExportTraceServiceRequest` message format + + **Documentation:** + - Integration guide: https://langfuse.com/integrations/native/opentelemetry + - Data model: https://langfuse.com/docs/observability/data-model + + Parameters + ---------- + resource_spans : typing.Sequence[OtelResourceSpan] + Array of resource spans containing trace data as defined in the OTLP specification + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[OtelTraceResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/otel/v1/traces", + method="POST", + json={ + "resourceSpans": convert_and_respect_annotation_metadata( + object_=resource_spans, + annotation=typing.Sequence[OtelResourceSpan], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + OtelTraceResponse, + parse_obj_as( + type_=OtelTraceResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise 
ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/opentelemetry/types/__init__.py b/langfuse/api/resources/opentelemetry/types/__init__.py index 4ca603db6..ad2fd4899 100644 --- a/langfuse/api/resources/opentelemetry/types/__init__.py +++ b/langfuse/api/resources/opentelemetry/types/__init__.py @@ -1,13 +1,57 @@ # This file was auto-generated by Fern from our API Definition. -from .otel_attribute import OtelAttribute -from .otel_attribute_value import OtelAttributeValue -from .otel_resource import OtelResource -from .otel_resource_span import OtelResourceSpan -from .otel_scope import OtelScope -from .otel_scope_span import OtelScopeSpan -from .otel_span import OtelSpan -from .otel_trace_response import OtelTraceResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .otel_attribute import OtelAttribute + from .otel_attribute_value import OtelAttributeValue + from .otel_resource import OtelResource + from .otel_resource_span import OtelResourceSpan + from .otel_scope import OtelScope + from .otel_scope_span import OtelScopeSpan + from .otel_span import OtelSpan + from .otel_trace_response import OtelTraceResponse +_dynamic_imports: typing.Dict[str, str] = { + "OtelAttribute": ".otel_attribute", + "OtelAttributeValue": ".otel_attribute_value", + "OtelResource": ".otel_resource", + "OtelResourceSpan": ".otel_resource_span", + "OtelScope": ".otel_scope", + "OtelScopeSpan": ".otel_scope_span", + "OtelSpan": ".otel_span", + "OtelTraceResponse": ".otel_trace_response", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + 
return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "OtelAttribute", diff --git a/langfuse/api/resources/opentelemetry/types/otel_attribute.py b/langfuse/api/resources/opentelemetry/types/otel_attribute.py index 91b9e2b70..b7d6a7497 100644 --- a/langfuse/api/resources/opentelemetry/types/otel_attribute.py +++ b/langfuse/api/resources/opentelemetry/types/otel_attribute.py @@ -1,55 +1,34 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .otel_attribute_value import OtelAttributeValue -class OtelAttribute(pydantic_v1.BaseModel): +class OtelAttribute(UniversalBaseModel): """ Key-value attribute pair for resources, scopes, or spans """ - key: typing.Optional[str] = pydantic_v1.Field(default=None) + key: typing.Optional[str] = pydantic.Field(default=None) """ Attribute key (e.g., "service.name", "langfuse.observation.type") """ - value: typing.Optional[OtelAttributeValue] = pydantic_v1.Field(default=None) + value: typing.Optional[OtelAttributeValue] = pydantic.Field(default=None) """ Attribute value """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - 
**kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/opentelemetry/types/otel_attribute_value.py b/langfuse/api/resources/opentelemetry/types/otel_attribute_value.py index 51f026495..ef63d7b45 100644 --- a/langfuse/api/resources/opentelemetry/types/otel_attribute_value.py +++ b/langfuse/api/resources/opentelemetry/types/otel_attribute_value.py @@ -1,72 +1,53 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class OtelAttributeValue(pydantic_v1.BaseModel): +class OtelAttributeValue(UniversalBaseModel): """ Attribute value wrapper supporting different value types """ - string_value: typing.Optional[str] = pydantic_v1.Field( - alias="stringValue", default=None - ) + string_value: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="stringValue") + ] = pydantic.Field(default=None) """ String value """ - int_value: typing.Optional[int] = pydantic_v1.Field(alias="intValue", default=None) + int_value: typing_extensions.Annotated[ + typing.Optional[int], FieldMetadata(alias="intValue") + ] = pydantic.Field(default=None) """ Integer value """ - double_value: typing.Optional[float] = pydantic_v1.Field( - alias="doubleValue", default=None - ) + double_value: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="doubleValue") + ] = pydantic.Field(default=None) """ Double value """ - bool_value: typing.Optional[bool] = pydantic_v1.Field( - alias="boolValue", default=None - ) + bool_value: typing_extensions.Annotated[ + typing.Optional[bool], FieldMetadata(alias="boolValue") + ] = pydantic.Field(default=None) """ Boolean value """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any 
= { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/opentelemetry/types/otel_resource.py b/langfuse/api/resources/opentelemetry/types/otel_resource.py index 0d76d5a15..9770cda4c 100644 --- a/langfuse/api/resources/opentelemetry/types/otel_resource.py +++ b/langfuse/api/resources/opentelemetry/types/otel_resource.py @@ -1,52 +1,31 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .otel_attribute import OtelAttribute -class OtelResource(pydantic_v1.BaseModel): +class OtelResource(UniversalBaseModel): """ Resource attributes identifying the source of telemetry """ - attributes: typing.Optional[typing.List[OtelAttribute]] = pydantic_v1.Field( + attributes: typing.Optional[typing.List[OtelAttribute]] = pydantic.Field( default=None ) """ Resource attributes like service.name, service.version, etc. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/opentelemetry/types/otel_resource_span.py b/langfuse/api/resources/opentelemetry/types/otel_resource_span.py index e270ba7d8..11a771102 100644 --- a/langfuse/api/resources/opentelemetry/types/otel_resource_span.py +++ b/langfuse/api/resources/opentelemetry/types/otel_resource_span.py @@ -1,60 +1,39 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .otel_resource import OtelResource from .otel_scope_span import OtelScopeSpan -class OtelResourceSpan(pydantic_v1.BaseModel): +class OtelResourceSpan(UniversalBaseModel): """ Represents a collection of spans from a single resource as per OTLP specification """ - resource: typing.Optional[OtelResource] = pydantic_v1.Field(default=None) + resource: typing.Optional[OtelResource] = pydantic.Field(default=None) """ Resource information """ - scope_spans: typing.Optional[typing.List[OtelScopeSpan]] = pydantic_v1.Field( - alias="scopeSpans", default=None - ) + scope_spans: typing_extensions.Annotated[ + typing.Optional[typing.List[OtelScopeSpan]], FieldMetadata(alias="scopeSpans") + ] = pydantic.Field(default=None) """ Array of scope spans """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: 
typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/opentelemetry/types/otel_scope.py b/langfuse/api/resources/opentelemetry/types/otel_scope.py index 71e9b75b8..25061b52a 100644 --- a/langfuse/api/resources/opentelemetry/types/otel_scope.py +++ b/langfuse/api/resources/opentelemetry/types/otel_scope.py @@ -1,62 +1,41 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .otel_attribute import OtelAttribute -class OtelScope(pydantic_v1.BaseModel): +class OtelScope(UniversalBaseModel): """ Instrumentation scope information """ - name: typing.Optional[str] = pydantic_v1.Field(default=None) + name: typing.Optional[str] = pydantic.Field(default=None) """ Instrumentation scope name """ - version: typing.Optional[str] = pydantic_v1.Field(default=None) + version: typing.Optional[str] = pydantic.Field(default=None) """ Instrumentation scope version """ - attributes: typing.Optional[typing.List[OtelAttribute]] = pydantic_v1.Field( + attributes: typing.Optional[typing.List[OtelAttribute]] = pydantic.Field( default=None ) """ Additional scope attributes """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, 
- "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/opentelemetry/types/otel_scope_span.py b/langfuse/api/resources/opentelemetry/types/otel_scope_span.py index 854951a60..452b8f4f1 100644 --- a/langfuse/api/resources/opentelemetry/types/otel_scope_span.py +++ b/langfuse/api/resources/opentelemetry/types/otel_scope_span.py @@ -1,56 +1,35 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .otel_scope import OtelScope from .otel_span import OtelSpan -class OtelScopeSpan(pydantic_v1.BaseModel): +class OtelScopeSpan(UniversalBaseModel): """ Collection of spans from a single instrumentation scope """ - scope: typing.Optional[OtelScope] = pydantic_v1.Field(default=None) + scope: typing.Optional[OtelScope] = pydantic.Field(default=None) """ Instrumentation scope information """ - spans: typing.Optional[typing.List[OtelSpan]] = pydantic_v1.Field(default=None) + spans: typing.Optional[typing.List[OtelSpan]] = pydantic.Field(default=None) """ Array of spans """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return 
super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/opentelemetry/types/otel_span.py b/langfuse/api/resources/opentelemetry/types/otel_span.py index 08b7be7fb..50144a0c9 100644 --- a/langfuse/api/resources/opentelemetry/types/otel_span.py +++ b/langfuse/api/resources/opentelemetry/types/otel_span.py @@ -1,104 +1,83 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .otel_attribute import OtelAttribute -class OtelSpan(pydantic_v1.BaseModel): +class OtelSpan(UniversalBaseModel): """ Individual span representing a unit of work or operation """ - trace_id: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="traceId", default=None - ) + trace_id: typing_extensions.Annotated[ + typing.Optional[typing.Any], FieldMetadata(alias="traceId") + ] = pydantic.Field(default=None) """ Trace ID (16 bytes, hex-encoded string in JSON or Buffer in binary) """ - span_id: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="spanId", default=None - ) + span_id: typing_extensions.Annotated[ + typing.Optional[typing.Any], FieldMetadata(alias="spanId") + ] = pydantic.Field(default=None) """ Span ID (8 bytes, hex-encoded string in JSON or Buffer in binary) """ - parent_span_id: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="parentSpanId", default=None - ) + parent_span_id: typing_extensions.Annotated[ + typing.Optional[typing.Any], FieldMetadata(alias="parentSpanId") + ] = pydantic.Field(default=None) """ Parent span ID if this is a child span """ - name: typing.Optional[str] = pydantic_v1.Field(default=None) + name: typing.Optional[str] = pydantic.Field(default=None) """ Span name describing the operation """ - kind: typing.Optional[int] = pydantic_v1.Field(default=None) + kind: typing.Optional[int] = pydantic.Field(default=None) """ Span kind (1=INTERNAL, 2=SERVER, 3=CLIENT, 4=PRODUCER, 5=CONSUMER) """ - start_time_unix_nano: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="startTimeUnixNano", default=None - ) + start_time_unix_nano: typing_extensions.Annotated[ + 
typing.Optional[typing.Any], FieldMetadata(alias="startTimeUnixNano") + ] = pydantic.Field(default=None) """ Start time in nanoseconds since Unix epoch """ - end_time_unix_nano: typing.Optional[typing.Any] = pydantic_v1.Field( - alias="endTimeUnixNano", default=None - ) + end_time_unix_nano: typing_extensions.Annotated[ + typing.Optional[typing.Any], FieldMetadata(alias="endTimeUnixNano") + ] = pydantic.Field(default=None) """ End time in nanoseconds since Unix epoch """ - attributes: typing.Optional[typing.List[OtelAttribute]] = pydantic_v1.Field( + attributes: typing.Optional[typing.List[OtelAttribute]] = pydantic.Field( default=None ) """ Span attributes including Langfuse-specific attributes (langfuse.observation.*) """ - status: typing.Optional[typing.Any] = pydantic_v1.Field(default=None) + status: typing.Optional[typing.Any] = pydantic.Field(default=None) """ Span status object """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow 
diff --git a/langfuse/api/resources/opentelemetry/types/otel_trace_response.py b/langfuse/api/resources/opentelemetry/types/otel_trace_response.py index ef9897f06..809ccd4c0 100644 --- a/langfuse/api/resources/opentelemetry/types/otel_trace_response.py +++ b/langfuse/api/resources/opentelemetry/types/otel_trace_response.py @@ -1,44 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class OtelTraceResponse(pydantic_v1.BaseModel): +class OtelTraceResponse(UniversalBaseModel): """ Response from trace export request. Empty object indicates success. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/organizations/__init__.py b/langfuse/api/resources/organizations/__init__.py index 
36249ae36..469d10a42 100644 --- a/langfuse/api/resources/organizations/__init__.py +++ b/langfuse/api/resources/organizations/__init__.py @@ -1,17 +1,63 @@ # This file was auto-generated by Fern from our API Definition. -from .types import ( - DeleteMembershipRequest, - MembershipDeletionResponse, - MembershipRequest, - MembershipResponse, - MembershipRole, - MembershipsResponse, - OrganizationApiKey, - OrganizationApiKeysResponse, - OrganizationProject, - OrganizationProjectsResponse, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + DeleteMembershipRequest, + MembershipDeletionResponse, + MembershipRequest, + MembershipResponse, + MembershipRole, + MembershipsResponse, + OrganizationApiKey, + OrganizationApiKeysResponse, + OrganizationProject, + OrganizationProjectsResponse, + ) +_dynamic_imports: typing.Dict[str, str] = { + "DeleteMembershipRequest": ".types", + "MembershipDeletionResponse": ".types", + "MembershipRequest": ".types", + "MembershipResponse": ".types", + "MembershipRole": ".types", + "MembershipsResponse": ".types", + "OrganizationApiKey": ".types", + "OrganizationApiKeysResponse": ".types", + "OrganizationProject": ".types", + "OrganizationProjectsResponse": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return 
sorted(lazy_attrs) + __all__ = [ "DeleteMembershipRequest", diff --git a/langfuse/api/resources/organizations/client.py b/langfuse/api/resources/organizations/client.py index 1e7bcd117..59590eeed 100644 --- a/langfuse/api/resources/organizations/client.py +++ b/langfuse/api/resources/organizations/client.py @@ -1,18 +1,10 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawOrganizationsClient, RawOrganizationsClient from .types.delete_membership_request import DeleteMembershipRequest from .types.membership_deletion_response import MembershipDeletionResponse from .types.membership_request import MembershipRequest @@ -27,7 +19,18 @@ class OrganizationsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawOrganizationsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawOrganizationsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawOrganizationsClient + """ + return self._raw_client def get_organization_memberships( self, *, request_options: typing.Optional[RequestOptions] = None @@ -46,7 +49,7 @@ def get_organization_memberships( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -58,36 +61,10 @@ def get_organization_memberships( ) client.organizations.get_organization_memberships() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/organizations/memberships", - method="GET", - request_options=request_options, + _response = self._raw_client.get_organization_memberships( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MembershipsResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def update_organization_membership( self, @@ -111,8 +88,8 @@ def update_organization_membership( Examples -------- - from langfuse import MembershipRequest, MembershipRole - from langfuse.client import 
FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.organizations import MembershipRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -125,42 +102,14 @@ def update_organization_membership( client.organizations.update_organization_membership( request=MembershipRequest( user_id="userId", - role=MembershipRole.OWNER, + role="OWNER", ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/organizations/memberships", - method="PUT", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.update_organization_membership( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MembershipResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete_organization_membership( self, @@ -184,8 +133,8 @@ def delete_organization_membership( Examples -------- - from langfuse import DeleteMembershipRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from 
langfuse.resources.organizations import DeleteMembershipRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -201,40 +150,10 @@ def delete_organization_membership( ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/organizations/memberships", - method="DELETE", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.delete_organization_membership( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - MembershipDeletionResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_project_memberships( self, @@ -258,7 +177,7 @@ def get_project_memberships( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -272,36 +191,10 @@ def get_project_memberships( project_id="projectId", ) """ - _response = self._client_wrapper.httpx_client.request( - 
f"api/public/projects/{jsonable_encoder(project_id)}/memberships", - method="GET", - request_options=request_options, + _response = self._raw_client.get_project_memberships( + project_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MembershipsResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def update_project_membership( self, @@ -328,8 +221,8 @@ def update_project_membership( Examples -------- - from langfuse import MembershipRequest, MembershipRole - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.organizations import MembershipRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -343,42 +236,14 @@ def update_project_membership( project_id="projectId", request=MembershipRequest( user_id="userId", - role=MembershipRole.OWNER, + role="OWNER", ), ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/memberships", - method="PUT", - json=request, - 
request_options=request_options, - omit=OMIT, + _response = self._raw_client.update_project_membership( + project_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MembershipResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete_project_membership( self, @@ -405,8 +270,8 @@ def delete_project_membership( Examples -------- - from langfuse import DeleteMembershipRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.organizations import DeleteMembershipRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -423,40 +288,10 @@ def delete_project_membership( ), ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/memberships", - method="DELETE", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.delete_project_membership( + project_id, request=request, request_options=request_options ) - try: - if 200 <= 
_response.status_code < 300: - return pydantic_v1.parse_obj_as( - MembershipDeletionResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_organization_projects( self, *, request_options: typing.Optional[RequestOptions] = None @@ -475,7 +310,7 @@ def get_organization_projects( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -487,38 +322,10 @@ def get_organization_projects( ) client.organizations.get_organization_projects() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/organizations/projects", - method="GET", - request_options=request_options, + _response = self._raw_client.get_organization_projects( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - OrganizationProjectsResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 
401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_organization_api_keys( self, *, request_options: typing.Optional[RequestOptions] = None @@ -537,7 +344,7 @@ def get_organization_api_keys( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -549,43 +356,26 @@ def get_organization_api_keys( ) client.organizations.get_organization_api_keys() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/organizations/apiKeys", - method="GET", - request_options=request_options, + _response = self._raw_client.get_organization_api_keys( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - OrganizationApiKeysResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - 
raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncOrganizationsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawOrganizationsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawOrganizationsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawOrganizationsClient + """ + return self._raw_client async def get_organization_memberships( self, *, request_options: typing.Optional[RequestOptions] = None @@ -606,7 +396,7 @@ async def get_organization_memberships( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -624,36 +414,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/organizations/memberships", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_organization_memberships( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MembershipsResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: 
ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def update_organization_membership( self, @@ -679,8 +443,8 @@ async def update_organization_membership( -------- import asyncio - from langfuse import MembershipRequest, MembershipRole - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.organizations import MembershipRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -696,45 +460,17 @@ async def main() -> None: await client.organizations.update_organization_membership( request=MembershipRequest( user_id="userId", - role=MembershipRole.OWNER, + role="OWNER", ), ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/organizations/memberships", - method="PUT", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.update_organization_membership( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MembershipResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore 
- if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete_organization_membership( self, @@ -760,8 +496,8 @@ async def delete_organization_membership( -------- import asyncio - from langfuse import DeleteMembershipRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.organizations import DeleteMembershipRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -783,40 +519,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/organizations/memberships", - method="DELETE", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.delete_organization_membership( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - MembershipDeletionResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if 
_response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_project_memberships( self, @@ -842,7 +548,7 @@ async def get_project_memberships( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -862,36 +568,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/memberships", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_project_memberships( + project_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MembershipsResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - 
except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def update_project_membership( self, @@ -920,8 +600,8 @@ async def update_project_membership( -------- import asyncio - from langfuse import MembershipRequest, MembershipRole - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.organizations import MembershipRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -938,45 +618,17 @@ async def main() -> None: project_id="projectId", request=MembershipRequest( user_id="userId", - role=MembershipRole.OWNER, + role="OWNER", ), ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/memberships", - method="PUT", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.update_project_membership( + project_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MembershipResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except 
JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete_project_membership( self, @@ -1005,8 +657,8 @@ async def delete_project_membership( -------- import asyncio - from langfuse import DeleteMembershipRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.organizations import DeleteMembershipRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1029,40 +681,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/memberships", - method="DELETE", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.delete_project_membership( + project_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - MembershipDeletionResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_organization_projects( self, *, request_options: typing.Optional[RequestOptions] = None @@ -1083,7 +705,7 @@ async def get_organization_projects( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1101,38 +723,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/organizations/projects", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_organization_projects( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - OrganizationProjectsResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_organization_api_keys( self, *, request_options: typing.Optional[RequestOptions] = None @@ -1153,7 +747,7 @@ async def get_organization_api_keys( -------- import 
asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1171,35 +765,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/organizations/apiKeys", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_organization_api_keys( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - OrganizationApiKeysResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/organizations/raw_client.py b/langfuse/api/resources/organizations/raw_client.py new file mode 100644 index 000000000..9443e5668 --- /dev/null +++ b/langfuse/api/resources/organizations/raw_client.py @@ -0,0 +1,1699 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.delete_membership_request import DeleteMembershipRequest +from .types.membership_deletion_response import MembershipDeletionResponse +from .types.membership_request import MembershipRequest +from .types.membership_response import MembershipResponse +from .types.memberships_response import MembershipsResponse +from .types.organization_api_keys_response import OrganizationApiKeysResponse +from .types.organization_projects_response import OrganizationProjectsResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawOrganizationsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get_organization_memberships( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[MembershipsResponse]: + """ + Get all memberships for the organization associated with the API key (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[MembershipsResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/organizations/memberships", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipsResponse, + parse_obj_as( + type_=MembershipsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def update_organization_membership( + self, + *, + request: MembershipRequest, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> HttpResponse[MembershipResponse]: + """ + Create or update a membership for the organization associated with the API key (requires organization-scoped API key) + + Parameters + ---------- + request : MembershipRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[MembershipResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/organizations/memberships", + method="PUT", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=MembershipRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipResponse, + parse_obj_as( + type_=MembershipResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # 
type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete_organization_membership( + self, + *, + request: DeleteMembershipRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[MembershipDeletionResponse]: + """ + Delete a membership from the organization associated with the API key (requires organization-scoped API key) + + Parameters + ---------- + request : DeleteMembershipRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[MembershipDeletionResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/organizations/memberships", + method="DELETE", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=DeleteMembershipRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipDeletionResponse, + parse_obj_as( + type_=MembershipDeletionResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + 
typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_project_memberships( + self, + project_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[MembershipsResponse]: + """ + Get all memberships for a specific project (requires organization-scoped API key) + + Parameters + ---------- + project_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[MembershipsResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/memberships", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipsResponse, + parse_obj_as( + type_=MembershipsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def update_project_membership( + self, + project_id: str, + *, + request: MembershipRequest, + 
request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[MembershipResponse]: + """ + Create or update a membership for a specific project (requires organization-scoped API key). The user must already be a member of the organization. + + Parameters + ---------- + project_id : str + + request : MembershipRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[MembershipResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/memberships", + method="PUT", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=MembershipRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipResponse, + parse_obj_as( + type_=MembershipResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + 
headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete_project_membership( + self, + project_id: str, + *, + request: DeleteMembershipRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[MembershipDeletionResponse]: + """ + Delete a membership from a specific project (requires organization-scoped API key). The user must be a member of the organization. + + Parameters + ---------- + project_id : str + + request : DeleteMembershipRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[MembershipDeletionResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/memberships", + method="DELETE", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=DeleteMembershipRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipDeletionResponse, + parse_obj_as( + type_=MembershipDeletionResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + 
type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_organization_projects( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[OrganizationProjectsResponse]: + """ + Get all projects for the organization associated with the API key (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[OrganizationProjectsResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/organizations/projects", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + OrganizationProjectsResponse, + parse_obj_as( + type_=OrganizationProjectsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_organization_api_keys( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> 
HttpResponse[OrganizationApiKeysResponse]: + """ + Get all API keys for the organization associated with the API key (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[OrganizationApiKeysResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/organizations/apiKeys", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + OrganizationApiKeysResponse, + parse_obj_as( + type_=OrganizationApiKeysResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawOrganizationsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get_organization_memberships( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[MembershipsResponse]: + """ + Get all memberships for the organization associated with the API key (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[MembershipsResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/organizations/memberships", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipsResponse, + parse_obj_as( + type_=MembershipsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + 
type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def update_organization_membership( + self, + *, + request: MembershipRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[MembershipResponse]: + """ + Create or update a membership for the organization associated with the API key (requires organization-scoped API key) + + Parameters + ---------- + request : MembershipRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[MembershipResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/organizations/memberships", + method="PUT", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=MembershipRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipResponse, + parse_obj_as( + type_=MembershipResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + 
body=_response_json, + ) + + async def delete_organization_membership( + self, + *, + request: DeleteMembershipRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[MembershipDeletionResponse]: + """ + Delete a membership from the organization associated with the API key (requires organization-scoped API key) + + Parameters + ---------- + request : DeleteMembershipRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[MembershipDeletionResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/organizations/memberships", + method="DELETE", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=DeleteMembershipRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipDeletionResponse, + parse_obj_as( + type_=MembershipDeletionResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_project_memberships( + self, + project_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[MembershipsResponse]: + """ + Get all memberships for a specific project (requires organization-scoped API key) + + Parameters + ---------- + project_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[MembershipsResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/memberships", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipsResponse, + parse_obj_as( + type_=MembershipsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + 
body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def update_project_membership( + self, + project_id: str, + *, + request: MembershipRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[MembershipResponse]: + """ + Create or update a membership for a specific project (requires organization-scoped API key). The user must already be a member of the organization. + + Parameters + ---------- + project_id : str + + request : MembershipRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[MembershipResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/memberships", + method="PUT", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=MembershipRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipResponse, + parse_obj_as( + type_=MembershipResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response_json, + ) + + async def delete_project_membership( + self, + project_id: str, + *, + request: DeleteMembershipRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[MembershipDeletionResponse]: + """ + Delete a membership from a specific project (requires organization-scoped API key). The user must be a member of the organization. + + Parameters + ---------- + project_id : str + + request : DeleteMembershipRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[MembershipDeletionResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/memberships", + method="DELETE", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=DeleteMembershipRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembershipDeletionResponse, + parse_obj_as( + type_=MembershipDeletionResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + 
headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_organization_projects( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[OrganizationProjectsResponse]: + """ + Get all projects for the organization associated with the API key (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[OrganizationProjectsResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/organizations/projects", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + OrganizationProjectsResponse, + parse_obj_as( + type_=OrganizationProjectsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_organization_api_keys( + self, *, request_options: 
typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[OrganizationApiKeysResponse]: + """ + Get all API keys for the organization associated with the API key (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[OrganizationApiKeysResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/organizations/apiKeys", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + OrganizationApiKeysResponse, + parse_obj_as( + type_=OrganizationApiKeysResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/organizations/types/__init__.py b/langfuse/api/resources/organizations/types/__init__.py index b3ea09797..2ac6cb3e7 100644 --- a/langfuse/api/resources/organizations/types/__init__.py +++ b/langfuse/api/resources/organizations/types/__init__.py @@ -1,15 +1,61 @@ # This file was auto-generated by Fern from our API Definition. -from .delete_membership_request import DeleteMembershipRequest -from .membership_deletion_response import MembershipDeletionResponse -from .membership_request import MembershipRequest -from .membership_response import MembershipResponse -from .membership_role import MembershipRole -from .memberships_response import MembershipsResponse -from .organization_api_key import OrganizationApiKey -from .organization_api_keys_response import OrganizationApiKeysResponse -from .organization_project import OrganizationProject -from .organization_projects_response import OrganizationProjectsResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .delete_membership_request import DeleteMembershipRequest + from .membership_deletion_response import MembershipDeletionResponse + from .membership_request import MembershipRequest + from .membership_response import MembershipResponse + from .membership_role import MembershipRole + from .memberships_response import MembershipsResponse + from .organization_api_key import OrganizationApiKey + from .organization_api_keys_response import OrganizationApiKeysResponse + from .organization_project import OrganizationProject + from .organization_projects_response import OrganizationProjectsResponse +_dynamic_imports: typing.Dict[str, str] = { + "DeleteMembershipRequest": ".delete_membership_request", + 
"MembershipDeletionResponse": ".membership_deletion_response", + "MembershipRequest": ".membership_request", + "MembershipResponse": ".membership_response", + "MembershipRole": ".membership_role", + "MembershipsResponse": ".memberships_response", + "OrganizationApiKey": ".organization_api_key", + "OrganizationApiKeysResponse": ".organization_api_keys_response", + "OrganizationProject": ".organization_project", + "OrganizationProjectsResponse": ".organization_projects_response", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "DeleteMembershipRequest", diff --git a/langfuse/api/resources/organizations/types/delete_membership_request.py b/langfuse/api/resources/organizations/types/delete_membership_request.py index 6752b0aae..74952dcfb 100644 --- a/langfuse/api/resources/organizations/types/delete_membership_request.py +++ b/langfuse/api/resources/organizations/types/delete_membership_request.py @@ -1,44 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class DeleteMembershipRequest(pydantic_v1.BaseModel): - user_id: str = pydantic_v1.Field(alias="userId") +class DeleteMembershipRequest(UniversalBaseModel): + user_id: typing_extensions.Annotated[str, FieldMetadata(alias="userId")] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/organizations/types/membership_deletion_response.py b/langfuse/api/resources/organizations/types/membership_deletion_response.py index f9c1915b7..4ac522f58 100644 --- a/langfuse/api/resources/organizations/types/membership_deletion_response.py +++ 
b/langfuse/api/resources/organizations/types/membership_deletion_response.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class MembershipDeletionResponse(pydantic_v1.BaseModel): +class MembershipDeletionResponse(UniversalBaseModel): message: str - user_id: str = pydantic_v1.Field(alias="userId") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + user_id: typing_extensions.Annotated[str, FieldMetadata(alias="userId")] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/organizations/types/membership_request.py 
b/langfuse/api/resources/organizations/types/membership_request.py index a7f046f51..87a526fef 100644 --- a/langfuse/api/resources/organizations/types/membership_request.py +++ b/langfuse/api/resources/organizations/types/membership_request.py @@ -1,46 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .membership_role import MembershipRole -class MembershipRequest(pydantic_v1.BaseModel): - user_id: str = pydantic_v1.Field(alias="userId") +class MembershipRequest(UniversalBaseModel): + user_id: typing_extensions.Annotated[str, FieldMetadata(alias="userId")] role: MembershipRole - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True 
+ smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/organizations/types/membership_response.py b/langfuse/api/resources/organizations/types/membership_response.py index e9d82f3c7..aa1b1afcf 100644 --- a/langfuse/api/resources/organizations/types/membership_response.py +++ b/langfuse/api/resources/organizations/types/membership_response.py @@ -1,48 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .membership_role import MembershipRole -class MembershipResponse(pydantic_v1.BaseModel): - user_id: str = pydantic_v1.Field(alias="userId") +class MembershipResponse(UniversalBaseModel): + user_id: typing_extensions.Annotated[str, FieldMetadata(alias="userId")] role: MembershipRole email: str name: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - 
allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/organizations/types/membership_role.py b/langfuse/api/resources/organizations/types/membership_role.py index 1721cc0ed..d7683b8d5 100644 --- a/langfuse/api/resources/organizations/types/membership_role.py +++ b/langfuse/api/resources/organizations/types/membership_role.py @@ -1,29 +1,7 @@ # This file was auto-generated by Fern from our API Definition. -import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class MembershipRole(str, enum.Enum): - OWNER = "OWNER" - ADMIN = "ADMIN" - MEMBER = "MEMBER" - VIEWER = "VIEWER" - - def visit( - self, - owner: typing.Callable[[], T_Result], - admin: typing.Callable[[], T_Result], - member: typing.Callable[[], T_Result], - viewer: typing.Callable[[], T_Result], - ) -> T_Result: - if self is MembershipRole.OWNER: - return owner() - if self is MembershipRole.ADMIN: - return admin() - if self is MembershipRole.MEMBER: - return member() - if self is MembershipRole.VIEWER: - return viewer() +MembershipRole = typing.Union[ + typing.Literal["OWNER", "ADMIN", "MEMBER", "VIEWER"], typing.Any +] diff --git a/langfuse/api/resources/organizations/types/memberships_response.py b/langfuse/api/resources/organizations/types/memberships_response.py index 0a8091449..1c847542c 100644 --- a/langfuse/api/resources/organizations/types/memberships_response.py +++ b/langfuse/api/resources/organizations/types/memberships_response.py @@ -1,43 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .membership_response import MembershipResponse -class MembershipsResponse(pydantic_v1.BaseModel): +class MembershipsResponse(UniversalBaseModel): memberships: typing.List[MembershipResponse] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/organizations/types/organization_api_key.py b/langfuse/api/resources/organizations/types/organization_api_key.py index ad54bb182..76fab2e81 100644 --- a/langfuse/api/resources/organizations/types/organization_api_key.py +++ b/langfuse/api/resources/organizations/types/organization_api_key.py @@ -3,52 +3,36 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import 
deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class OrganizationApiKey(pydantic_v1.BaseModel): +class OrganizationApiKey(UniversalBaseModel): id: str - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - expires_at: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="expiresAt", default=None - ) - last_used_at: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="lastUsedAt", default=None - ) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + expires_at: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="expiresAt") + ] = None + last_used_at: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="lastUsedAt") + ] = None note: typing.Optional[str] = None - public_key: str = pydantic_v1.Field(alias="publicKey") - display_secret_key: str = pydantic_v1.Field(alias="displaySecretKey") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + public_key: typing_extensions.Annotated[str, 
FieldMetadata(alias="publicKey")] + display_secret_key: typing_extensions.Annotated[ + str, FieldMetadata(alias="displaySecretKey") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/organizations/types/organization_api_keys_response.py b/langfuse/api/resources/organizations/types/organization_api_keys_response.py index e19ce6373..cc4c4f2e1 100644 --- a/langfuse/api/resources/organizations/types/organization_api_keys_response.py +++ b/langfuse/api/resources/organizations/types/organization_api_keys_response.py @@ -1,45 +1,26 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .organization_api_key import OrganizationApiKey -class OrganizationApiKeysResponse(pydantic_v1.BaseModel): - api_keys: typing.List[OrganizationApiKey] = pydantic_v1.Field(alias="apiKeys") +class OrganizationApiKeysResponse(UniversalBaseModel): + api_keys: typing_extensions.Annotated[ + typing.List[OrganizationApiKey], FieldMetadata(alias="apiKeys") + ] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - 
kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/organizations/types/organization_project.py b/langfuse/api/resources/organizations/types/organization_project.py index 87f245b9a..993fd724f 100644 --- a/langfuse/api/resources/organizations/types/organization_project.py +++ b/langfuse/api/resources/organizations/types/organization_project.py @@ -3,46 +3,30 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class OrganizationProject(pydantic_v1.BaseModel): +class OrganizationProject(UniversalBaseModel): id: str name: str metadata: typing.Optional[typing.Dict[str, typing.Any]] = None - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": 
True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/organizations/types/organization_projects_response.py b/langfuse/api/resources/organizations/types/organization_projects_response.py index 1c939a3e0..3ef0a658a 100644 --- a/langfuse/api/resources/organizations/types/organization_projects_response.py +++ b/langfuse/api/resources/organizations/types/organization_projects_response.py @@ -1,43 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .organization_project import OrganizationProject -class OrganizationProjectsResponse(pydantic_v1.BaseModel): +class OrganizationProjectsResponse(UniversalBaseModel): projects: typing.List[OrganizationProject] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/projects/__init__.py b/langfuse/api/resources/projects/__init__.py index 26c74c1c7..6cb87b5e2 100644 --- a/langfuse/api/resources/projects/__init__.py +++ b/langfuse/api/resources/projects/__init__.py @@ -1,14 +1,57 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import ( - ApiKeyDeletionResponse, - ApiKeyList, - ApiKeyResponse, - ApiKeySummary, - Project, - ProjectDeletionResponse, - Projects, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + ApiKeyDeletionResponse, + ApiKeyList, + ApiKeyResponse, + ApiKeySummary, + Project, + ProjectDeletionResponse, + Projects, + ) +_dynamic_imports: typing.Dict[str, str] = { + "ApiKeyDeletionResponse": ".types", + "ApiKeyList": ".types", + "ApiKeyResponse": ".types", + "ApiKeySummary": ".types", + "Project": ".types", + "ProjectDeletionResponse": ".types", + "Projects": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "ApiKeyDeletionResponse", diff --git a/langfuse/api/resources/projects/client.py b/langfuse/api/resources/projects/client.py index 9af7cfdfa..71324b0f6 100644 --- a/langfuse/api/resources/projects/client.py +++ b/langfuse/api/resources/projects/client.py @@ -1,18 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawProjectsClient, RawProjectsClient from .types.api_key_deletion_response import ApiKeyDeletionResponse from .types.api_key_list import ApiKeyList from .types.api_key_response import ApiKeyResponse @@ -26,7 +18,18 @@ class ProjectsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawProjectsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawProjectsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawProjectsClient + """ + return self._raw_client def get( self, *, request_options: typing.Optional[RequestOptions] = None @@ -45,7 +48,7 @@ def get( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -57,34 +60,8 @@ def get( ) client.projects.get() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/projects", method="GET", request_options=request_options - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Projects, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(request_options=request_options) + return _response.data def create( self, @@ -116,7 +93,7 @@ def create( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -131,38 +108,13 @@ def create( retention=1, ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/projects", - method="POST", - 
json={"name": name, "metadata": metadata, "retention": retention}, + _response = self._raw_client.create( + name=name, + retention=retention, + metadata=metadata, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def update( self, @@ -197,7 +149,7 @@ def update( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -213,38 +165,14 @@ def update( retention=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}", - method="PUT", - json={"name": name, "metadata": metadata, "retention": retention}, + _response = self._raw_client.update( + project_id, + name=name, + retention=retention, + metadata=metadata, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore - if 
_response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete( self, @@ -268,7 +196,7 @@ def delete( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -282,38 +210,8 @@ def delete( project_id="projectId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - ProjectDeletionResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, 
_response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(project_id, request_options=request_options) + return _response.data def get_api_keys( self, @@ -337,7 +235,7 @@ def get_api_keys( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -351,36 +249,10 @@ def get_api_keys( project_id="projectId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys", - method="GET", - request_options=request_options, + _response = self._raw_client.get_api_keys( + project_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ApiKeyList, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create_api_key( self, @@ -416,7 +288,7 @@ def create_api_key( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -430,38 +302,14 @@ def create_api_key( project_id="projectId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys", - method="POST", - json={"note": note, "publicKey": public_key, "secretKey": secret_key}, + _response = self._raw_client.create_api_key( + project_id, + note=note, + public_key=public_key, + secret_key=secret_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ApiKeyResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete_api_key( self, @@ -488,7 +336,7 @@ def delete_api_key( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( 
x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -503,43 +351,26 @@ def delete_api_key( api_key_id="apiKeyId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys/{jsonable_encoder(api_key_id)}", - method="DELETE", - request_options=request_options, + _response = self._raw_client.delete_api_key( + project_id, api_key_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - ApiKeyDeletionResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncProjectsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawProjectsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawProjectsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawProjectsClient + """ + return self._raw_client async def get( self, *, request_options: typing.Optional[RequestOptions] = None @@ -560,7 +391,7 @@ async def get( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -578,34 +409,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/projects", method="GET", request_options=request_options - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Projects, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(request_options=request_options) + return _response.data async def create( self, @@ -639,7 +444,7 @@ async def create( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -660,38 +465,13 @@ async def main() -> None: 
asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/projects", - method="POST", - json={"name": name, "metadata": metadata, "retention": retention}, + _response = await self._raw_client.create( + name=name, + retention=retention, + metadata=metadata, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def update( self, @@ -728,7 +508,7 @@ async def update( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -750,38 +530,14 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}", - method="PUT", - json={"name": name, "metadata": metadata, "retention": retention}, + _response = await self._raw_client.update( + project_id, + name=name, + retention=retention, 
+ metadata=metadata, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete( self, @@ -807,7 +563,7 @@ async def delete( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -827,38 +583,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}", - method="DELETE", - request_options=request_options, + _response = await self._raw_client.delete( + project_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - ProjectDeletionResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise 
UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_api_keys( self, @@ -884,7 +612,7 @@ async def get_api_keys( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -904,36 +632,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_api_keys( + project_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ApiKeyList, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) 
- ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create_api_key( self, @@ -971,7 +673,7 @@ async def create_api_key( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -991,38 +693,14 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys", - method="POST", - json={"note": note, "publicKey": public_key, "secretKey": secret_key}, + _response = await self._raw_client.create_api_key( + project_id, + note=note, + public_key=public_key, + secret_key=secret_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ApiKeyResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete_api_key( self, @@ -1051,7 +729,7 @@ async def delete_api_key( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1072,35 +750,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys/{jsonable_encoder(api_key_id)}", - method="DELETE", - request_options=request_options, + _response = await self._raw_client.delete_api_key( + project_id, api_key_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - ApiKeyDeletionResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/projects/raw_client.py b/langfuse/api/resources/projects/raw_client.py new file mode 
100644 index 000000000..c1316c7f4 --- /dev/null +++ b/langfuse/api/resources/projects/raw_client.py @@ -0,0 +1,1571 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.api_key_deletion_response import ApiKeyDeletionResponse +from .types.api_key_list import ApiKeyList +from .types.api_key_response import ApiKeyResponse +from .types.project import Project +from .types.project_deletion_response import ProjectDeletionResponse +from .types.projects import Projects + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawProjectsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[Projects]: + """ + Get Project associated with API key + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Projects] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/projects", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Projects, + parse_obj_as( + type_=Projects, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def create( + self, + *, + name: str, + retention: int, + metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + request_options: typing.Optional[RequestOptions] = 
None, + ) -> HttpResponse[Project]: + """ + Create a new project (requires organization-scoped API key) + + Parameters + ---------- + name : str + + retention : int + Number of days to retain data. Must be 0 or at least 3 days. Requires data-retention entitlement for non-zero values. Optional. + + metadata : typing.Optional[typing.Dict[str, typing.Any]] + Optional metadata for the project + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Project] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/projects", + method="POST", + json={ + "name": name, + "metadata": metadata, + "retention": retention, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, 
+ parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def update( + self, + project_id: str, + *, + name: str, + retention: int, + metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Project]: + """ + Update a project by ID (requires organization-scoped API key). + + Parameters + ---------- + project_id : str + + name : str + + retention : int + Number of days to retain data. Must be 0 or at least 3 days. Requires data-retention entitlement for non-zero values. Optional. + + metadata : typing.Optional[typing.Dict[str, typing.Any]] + Optional metadata for the project + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Project] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}", + method="PUT", + json={ + "name": name, + "metadata": metadata, + "retention": retention, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete( + self, + project_id: str, + *, + 
request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ProjectDeletionResponse]: + """ + Delete a project by ID (requires organization-scoped API key). Project deletion is processed asynchronously. + + Parameters + ---------- + project_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ProjectDeletionResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectDeletionResponse, + parse_obj_as( + type_=ProjectDeletionResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + 
except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_api_keys( + self, + project_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ApiKeyList]: + """ + Get all API keys for a project (requires organization-scoped API key) + + Parameters + ---------- + project_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ApiKeyList] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ApiKeyList, + parse_obj_as( + type_=ApiKeyList, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: 
+ raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def create_api_key( + self, + project_id: str, + *, + note: typing.Optional[str] = OMIT, + public_key: typing.Optional[str] = OMIT, + secret_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ApiKeyResponse]: + """ + Create a new API key for a project (requires organization-scoped API key) + + Parameters + ---------- + project_id : str + + note : typing.Optional[str] + Optional note for the API key + + public_key : typing.Optional[str] + Optional predefined public key. Must start with 'pk-lf-'. If provided, secretKey must also be provided. + + secret_key : typing.Optional[str] + Optional predefined secret key. Must start with 'sk-lf-'. If provided, publicKey must also be provided. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ApiKeyResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys", + method="POST", + json={ + "note": note, + "publicKey": public_key, + "secretKey": secret_key, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ApiKeyResponse, + parse_obj_as( + type_=ApiKeyResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete_api_key( + 
self, + project_id: str, + api_key_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ApiKeyDeletionResponse]: + """ + Delete an API key for a project (requires organization-scoped API key) + + Parameters + ---------- + project_id : str + + api_key_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ApiKeyDeletionResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys/{jsonable_encoder(api_key_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ApiKeyDeletionResponse, + parse_obj_as( + type_=ApiKeyDeletionResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawProjectsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Projects]: + """ + Get Project associated with API key + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Projects] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/projects", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Projects, + parse_obj_as( + type_=Projects, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + 
type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def create( + self, + *, + name: str, + retention: int, + metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Project]: + """ + Create a new project (requires organization-scoped API key) + + Parameters + ---------- + name : str + + retention : int + Number of days to retain data. Must be 0 or at least 3 days. Requires data-retention entitlement for non-zero values. Optional. + + metadata : typing.Optional[typing.Dict[str, typing.Any]] + Optional metadata for the project + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Project] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/projects", + method="POST", + json={ + "name": name, + "metadata": metadata, + "retention": retention, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def update( + self, + project_id: str, + *, + name: str, + 
retention: int, + metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Project]: + """ + Update a project by ID (requires organization-scoped API key). + + Parameters + ---------- + project_id : str + + name : str + + retention : int + Number of days to retain data. Must be 0 or at least 3 days. Requires data-retention entitlement for non-zero values. Optional. + + metadata : typing.Optional[typing.Dict[str, typing.Any]] + Optional metadata for the project + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Project] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}", + method="PUT", + json={ + "name": name, + "metadata": metadata, + "retention": retention, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + 
parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def delete( + self, + project_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ProjectDeletionResponse]: + """ + Delete a project by ID (requires organization-scoped API key). Project deletion is processed asynchronously. + + Parameters + ---------- + project_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ProjectDeletionResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectDeletionResponse, + parse_obj_as( + type_=ProjectDeletionResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_api_keys( + self, + project_id: str, + *, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ApiKeyList]: + """ + Get all API keys for a project (requires organization-scoped API key) + + Parameters + ---------- + project_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ApiKeyList] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ApiKeyList, + parse_obj_as( + type_=ApiKeyList, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def create_api_key( + self, + project_id: str, + *, + note: typing.Optional[str] = OMIT, + public_key: typing.Optional[str] = OMIT, + secret_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ApiKeyResponse]: + """ + Create a new API key for a project (requires organization-scoped API key) + + Parameters + ---------- + project_id : str + + note : typing.Optional[str] + Optional note for the API key + + public_key : typing.Optional[str] + Optional predefined public key. Must start with 'pk-lf-'. If provided, secretKey must also be provided. + + secret_key : typing.Optional[str] + Optional predefined secret key. Must start with 'sk-lf-'. If provided, publicKey must also be provided. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ApiKeyResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys", + method="POST", + json={ + "note": note, + "publicKey": public_key, + "secretKey": secret_key, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ApiKeyResponse, + parse_obj_as( + type_=ApiKeyResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async 
def delete_api_key( + self, + project_id: str, + api_key_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ApiKeyDeletionResponse]: + """ + Delete an API key for a project (requires organization-scoped API key) + + Parameters + ---------- + project_id : str + + api_key_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ApiKeyDeletionResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/projects/{jsonable_encoder(project_id)}/apiKeys/{jsonable_encoder(api_key_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ApiKeyDeletionResponse, + parse_obj_as( + type_=ApiKeyDeletionResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + 
type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/projects/types/__init__.py b/langfuse/api/resources/projects/types/__init__.py index c59b62a62..e336c1ebb 100644 --- a/langfuse/api/resources/projects/types/__init__.py +++ b/langfuse/api/resources/projects/types/__init__.py @@ -1,12 +1,55 @@ # This file was auto-generated by Fern from our API Definition. -from .api_key_deletion_response import ApiKeyDeletionResponse -from .api_key_list import ApiKeyList -from .api_key_response import ApiKeyResponse -from .api_key_summary import ApiKeySummary -from .project import Project -from .project_deletion_response import ProjectDeletionResponse -from .projects import Projects +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .api_key_deletion_response import ApiKeyDeletionResponse + from .api_key_list import ApiKeyList + from .api_key_response import ApiKeyResponse + from .api_key_summary import ApiKeySummary + from .project import Project + from .project_deletion_response import ProjectDeletionResponse + from .projects import Projects +_dynamic_imports: typing.Dict[str, str] = { + "ApiKeyDeletionResponse": ".api_key_deletion_response", + "ApiKeyList": ".api_key_list", + "ApiKeyResponse": ".api_key_response", + "ApiKeySummary": ".api_key_summary", + "Project": ".project", + "ProjectDeletionResponse": ".project_deletion_response", + "Projects": ".projects", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name 
-> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "ApiKeyDeletionResponse", diff --git a/langfuse/api/resources/projects/types/api_key_deletion_response.py b/langfuse/api/resources/projects/types/api_key_deletion_response.py index 6084400de..25d0d1a85 100644 --- a/langfuse/api/resources/projects/types/api_key_deletion_response.py +++ b/langfuse/api/resources/projects/types/api_key_deletion_response.py @@ -1,46 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ApiKeyDeletionResponse(pydantic_v1.BaseModel): +class ApiKeyDeletionResponse(UniversalBaseModel): """ Response for API key deletion """ success: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - 
"exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/projects/types/api_key_list.py b/langfuse/api/resources/projects/types/api_key_list.py index 0a798ddbf..0ab0bf459 100644 --- a/langfuse/api/resources/projects/types/api_key_list.py +++ b/langfuse/api/resources/projects/types/api_key_list.py @@ -1,49 +1,30 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .api_key_summary import ApiKeySummary -class ApiKeyList(pydantic_v1.BaseModel): +class ApiKeyList(UniversalBaseModel): """ List of API keys for a project """ - api_keys: typing.List[ApiKeySummary] = pydantic_v1.Field(alias="apiKeys") + api_keys: typing_extensions.Annotated[ + typing.List[ApiKeySummary], FieldMetadata(alias="apiKeys") + ] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, 
- } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/projects/types/api_key_response.py b/langfuse/api/resources/projects/types/api_key_response.py index fc9364faf..32b6cc5e0 100644 --- a/langfuse/api/resources/projects/types/api_key_response.py +++ b/langfuse/api/resources/projects/types/api_key_response.py @@ -3,51 +3,35 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class ApiKeyResponse(pydantic_v1.BaseModel): +class ApiKeyResponse(UniversalBaseModel): """ Response for API key creation """ id: str - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - public_key: str = pydantic_v1.Field(alias="publicKey") - secret_key: str = pydantic_v1.Field(alias="secretKey") - display_secret_key: str = pydantic_v1.Field(alias="displaySecretKey") + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + public_key: typing_extensions.Annotated[str, FieldMetadata(alias="publicKey")] + secret_key: typing_extensions.Annotated[str, FieldMetadata(alias="secretKey")] + display_secret_key: typing_extensions.Annotated[ + str, FieldMetadata(alias="displaySecretKey") + ] note: typing.Optional[str] = None - def 
json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/projects/types/api_key_summary.py b/langfuse/api/resources/projects/types/api_key_summary.py index b95633731..510ed5a87 100644 --- a/langfuse/api/resources/projects/types/api_key_summary.py +++ b/langfuse/api/resources/projects/types/api_key_summary.py @@ -3,56 +3,40 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class ApiKeySummary(pydantic_v1.BaseModel): +class ApiKeySummary(UniversalBaseModel): """ Summary of an API key """ id: str - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - expires_at: typing.Optional[dt.datetime] = pydantic_v1.Field( - 
alias="expiresAt", default=None - ) - last_used_at: typing.Optional[dt.datetime] = pydantic_v1.Field( - alias="lastUsedAt", default=None - ) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + expires_at: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="expiresAt") + ] = None + last_used_at: typing_extensions.Annotated[ + typing.Optional[dt.datetime], FieldMetadata(alias="lastUsedAt") + ] = None note: typing.Optional[str] = None - public_key: str = pydantic_v1.Field(alias="publicKey") - display_secret_key: str = pydantic_v1.Field(alias="displaySecretKey") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + public_key: typing_extensions.Annotated[str, FieldMetadata(alias="publicKey")] + display_secret_key: typing_extensions.Annotated[ + str, FieldMetadata(alias="displaySecretKey") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/projects/types/project.py 
b/langfuse/api/resources/projects/types/project.py index cf257d406..6c500f376 100644 --- a/langfuse/api/resources/projects/types/project.py +++ b/langfuse/api/resources/projects/types/project.py @@ -1,56 +1,35 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class Project(pydantic_v1.BaseModel): +class Project(UniversalBaseModel): id: str name: str - metadata: typing.Dict[str, typing.Any] = pydantic_v1.Field() + metadata: typing.Dict[str, typing.Any] = pydantic.Field() """ Metadata for the project """ - retention_days: typing.Optional[int] = pydantic_v1.Field( - alias="retentionDays", default=None - ) + retention_days: typing_extensions.Annotated[ + typing.Optional[int], FieldMetadata(alias="retentionDays") + ] = pydantic.Field(default=None) """ Number of days to retain data. Null or 0 means no retention. Omitted if no retention is configured. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/projects/types/project_deletion_response.py b/langfuse/api/resources/projects/types/project_deletion_response.py index 62c05d3d8..ceff3073d 100644 --- a/langfuse/api/resources/projects/types/project_deletion_response.py +++ b/langfuse/api/resources/projects/types/project_deletion_response.py @@ -1,43 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ProjectDeletionResponse(pydantic_v1.BaseModel): +class ProjectDeletionResponse(UniversalBaseModel): success: bool message: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/projects/types/projects.py b/langfuse/api/resources/projects/types/projects.py index c5eaabfbd..a5e0e2beb 100644 --- a/langfuse/api/resources/projects/types/projects.py +++ b/langfuse/api/resources/projects/types/projects.py @@ -1,43 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .project import Project -class Projects(pydantic_v1.BaseModel): +class Projects(UniversalBaseModel): data: typing.List[Project] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/prompt_version/__init__.py b/langfuse/api/resources/prompt_version/__init__.py index f3ea2659b..5cde0202d 100644 --- a/langfuse/api/resources/prompt_version/__init__.py +++ b/langfuse/api/resources/prompt_version/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + diff --git a/langfuse/api/resources/prompt_version/client.py b/langfuse/api/resources/prompt_version/client.py index 89140941d..567060b84 100644 --- a/langfuse/api/resources/prompt_version/client.py +++ b/langfuse/api/resources/prompt_version/client.py @@ -1,19 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..prompts.types.prompt import Prompt +from .raw_client import AsyncRawPromptVersionClient, RawPromptVersionClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -21,7 +13,18 @@ class PromptVersionClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawPromptVersionClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawPromptVersionClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawPromptVersionClient + """ + return self._raw_client def update( self, @@ -54,7 +57,7 @@ def update( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -70,43 +73,26 @@ def update( new_labels=["newLabels", "newLabels"], ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/v2/prompts/{jsonable_encoder(name)}/versions/{jsonable_encoder(version)}", - method="PATCH", - json={"newLabels": new_labels}, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.update( + name, version, new_labels=new_labels, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncPromptVersionClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawPromptVersionClient(client_wrapper=client_wrapper) + + @property + def 
with_raw_response(self) -> AsyncRawPromptVersionClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawPromptVersionClient + """ + return self._raw_client async def update( self, @@ -141,7 +127,7 @@ async def update( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -163,35 +149,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/v2/prompts/{jsonable_encoder(name)}/versions/{jsonable_encoder(version)}", - method="PATCH", - json={"newLabels": new_labels}, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.update( + name, version, new_labels=new_labels, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git 
a/langfuse/api/resources/prompt_version/raw_client.py b/langfuse/api/resources/prompt_version/raw_client.py new file mode 100644 index 000000000..2a999d9b8 --- /dev/null +++ b/langfuse/api/resources/prompt_version/raw_client.py @@ -0,0 +1,262 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..prompts.types.prompt import Prompt + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawPromptVersionClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def update( + self, + name: str, + version: int, + *, + new_labels: typing.Sequence[str], + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prompt]: + """ + Update labels for a specific prompt version + + Parameters + ---------- + name : str + The name of the prompt + + version : int + Version of the prompt to update + + new_labels : typing.Sequence[str] + New labels for the prompt version. Labels are unique across versions. The "latest" label is reserved and managed by Langfuse. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Prompt] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/v2/prompts/{jsonable_encoder(name)}/versions/{jsonable_encoder(version)}", + method="PATCH", + json={ + "newLabels": new_labels, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawPromptVersionClient: + def __init__(self, *, 
client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def update( + self, + name: str, + version: int, + *, + new_labels: typing.Sequence[str], + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prompt]: + """ + Update labels for a specific prompt version + + Parameters + ---------- + name : str + The name of the prompt + + version : int + Version of the prompt to update + + new_labels : typing.Sequence[str] + New labels for the prompt version. Labels are unique across versions. The "latest" label is reserved and managed by Langfuse. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Prompt] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/v2/prompts/{jsonable_encoder(name)}/versions/{jsonable_encoder(version)}", + method="PATCH", + json={ + "newLabels": new_labels, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), 
+ body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/prompts/__init__.py b/langfuse/api/resources/prompts/__init__.py index ea2f2f56a..0d9dc2bd0 100644 --- a/langfuse/api/resources/prompts/__init__.py +++ b/langfuse/api/resources/prompts/__init__.py @@ -1,33 +1,82 @@ # This file was auto-generated by Fern from our API Definition. -from .types import ( - BasePrompt, - ChatMessage, - ChatMessageWithPlaceholders, - ChatMessageWithPlaceholders_Chatmessage, - ChatMessageWithPlaceholders_Placeholder, - ChatPrompt, - CreateChatPromptRequest, - CreatePromptRequest, - CreatePromptRequest_Chat, - CreatePromptRequest_Text, - CreateTextPromptRequest, - PlaceholderMessage, - Prompt, - PromptMeta, - PromptMetaListResponse, - PromptType, - Prompt_Chat, - Prompt_Text, - TextPrompt, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + BasePrompt, + ChatMessage, + ChatMessageWithPlaceholders, + ChatPrompt, + CreateChatPromptRequest, + CreatePromptRequest, + CreatePromptRequest_Chat, + CreatePromptRequest_Text, + CreateTextPromptRequest, + PlaceholderMessage, + Prompt, + PromptMeta, + PromptMetaListResponse, + PromptType, + Prompt_Chat, + Prompt_Text, + TextPrompt, + ) +_dynamic_imports: typing.Dict[str, str] = { + "BasePrompt": ".types", + "ChatMessage": ".types", + 
"ChatMessageWithPlaceholders": ".types", + "ChatPrompt": ".types", + "CreateChatPromptRequest": ".types", + "CreatePromptRequest": ".types", + "CreatePromptRequest_Chat": ".types", + "CreatePromptRequest_Text": ".types", + "CreateTextPromptRequest": ".types", + "PlaceholderMessage": ".types", + "Prompt": ".types", + "PromptMeta": ".types", + "PromptMetaListResponse": ".types", + "PromptType": ".types", + "Prompt_Chat": ".types", + "Prompt_Text": ".types", + "TextPrompt": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "BasePrompt", "ChatMessage", "ChatMessageWithPlaceholders", - "ChatMessageWithPlaceholders_Chatmessage", - "ChatMessageWithPlaceholders_Placeholder", "ChatPrompt", "CreateChatPromptRequest", "CreatePromptRequest", diff --git a/langfuse/api/resources/prompts/client.py b/langfuse/api/resources/prompts/client.py index c38c20156..27fe5829b 100644 --- a/langfuse/api/resources/prompts/client.py +++ b/langfuse/api/resources/prompts/client.py @@ -2,19 +2,10 @@ import datetime as dt import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.datetime_utils import serialize_datetime -from ...core.jsonable_encoder import jsonable_encoder -from 
...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawPromptsClient, RawPromptsClient from .types.create_prompt_request import CreatePromptRequest from .types.prompt import Prompt from .types.prompt_meta_list_response import PromptMetaListResponse @@ -25,7 +16,18 @@ class PromptsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawPromptsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawPromptsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawPromptsClient + """ + return self._raw_client def get( self, @@ -58,7 +60,7 @@ def get( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -72,37 +74,10 @@ def get( prompt_name="promptName", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/v2/prompts/{jsonable_encoder(prompt_name)}", - method="GET", - params={"version": version, "label": label}, - request_options=request_options, + _response = self._raw_client.get( + prompt_name, version=version, label=label, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def list( self, @@ -148,7 +123,7 @@ def list( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -160,51 +135,17 @@ def list( ) client.prompts.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/v2/prompts", - method="GET", - params={ - "name": name, - "label": label, - "tag": tag, - "page": page, - "limit": limit, - "fromUpdatedAt": serialize_datetime(from_updated_at) - if from_updated_at is not None - else None, - "toUpdatedAt": serialize_datetime(to_updated_at) - if to_updated_at is not None - else None, - }, + _response = self._raw_client.list( + name=name, + label=label, + tag=tag, + page=page, + limit=limit, + from_updated_at=from_updated_at, + to_updated_at=to_updated_at, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - PromptMetaListResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - 
if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create( self, @@ -228,11 +169,8 @@ def create( Examples -------- - from langfuse import ( - ChatMessageWithPlaceholders_Chatmessage, - CreatePromptRequest_Chat, - ) - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.prompts import ChatMessage, CreatePromptRequest_Chat client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -246,11 +184,11 @@ def create( request=CreatePromptRequest_Chat( name="name", prompt=[ - ChatMessageWithPlaceholders_Chatmessage( + ChatMessage( role="role", content="content", ), - ChatMessageWithPlaceholders_Chatmessage( + ChatMessage( role="role", content="content", ), @@ -258,43 +196,26 @@ def create( ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/v2/prompts", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore 
- if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncPromptsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawPromptsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawPromptsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawPromptsClient + """ + return self._raw_client async def get( self, @@ -329,7 +250,7 @@ async def get( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -349,37 +270,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/v2/prompts/{jsonable_encoder(prompt_name)}", - method="GET", - params={"version": version, "label": label}, - request_options=request_options, + _response = await self._raw_client.get( + prompt_name, version=version, label=label, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if 
_response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def list( self, @@ -427,7 +321,7 @@ async def list( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -445,51 +339,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/v2/prompts", - method="GET", - params={ - "name": name, - "label": label, - "tag": tag, - "page": page, - "limit": limit, - "fromUpdatedAt": serialize_datetime(from_updated_at) - if from_updated_at is not None - else None, - "toUpdatedAt": serialize_datetime(to_updated_at) - if to_updated_at is not None - else None, - }, + _response = await self._raw_client.list( + name=name, + label=label, + tag=tag, + page=page, + limit=limit, + from_updated_at=from_updated_at, + to_updated_at=to_updated_at, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as( - PromptMetaListResponse, _response.json() - ) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if 
_response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create( self, @@ -515,11 +375,8 @@ async def create( -------- import asyncio - from langfuse import ( - ChatMessageWithPlaceholders_Chatmessage, - CreatePromptRequest_Chat, - ) - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.prompts import ChatMessage, CreatePromptRequest_Chat client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -536,11 +393,11 @@ async def main() -> None: request=CreatePromptRequest_Chat( name="name", prompt=[ - ChatMessageWithPlaceholders_Chatmessage( + ChatMessage( role="role", content="content", ), - ChatMessageWithPlaceholders_Chatmessage( + ChatMessage( role="role", content="content", ), @@ -551,35 +408,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/v2/prompts", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore - if _response.status_code == 400: - 
raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/prompts/raw_client.py b/langfuse/api/resources/prompts/raw_client.py new file mode 100644 index 000000000..458af4136 --- /dev/null +++ b/langfuse/api/resources/prompts/raw_client.py @@ -0,0 +1,757 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.datetime_utils import serialize_datetime +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.create_prompt_request import CreatePromptRequest +from .types.prompt import Prompt +from .types.prompt_meta_list_response import PromptMetaListResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawPromptsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get( + self, + prompt_name: str, + *, + version: typing.Optional[int] = None, + label: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prompt]: + """ + Get a prompt + + Parameters + ---------- + prompt_name : str + The name of the prompt + + version : typing.Optional[int] + Version of the prompt to be retrieved. + + label : typing.Optional[str] + Label of the prompt to be retrieved. Defaults to "production" if no label or version is set. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Prompt] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/v2/prompts/{jsonable_encoder(prompt_name)}", + method="GET", + params={ + "version": version, + "label": label, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def list( + self, + *, + name: typing.Optional[str] = None, + label: typing.Optional[str] = 
None, + tag: typing.Optional[str] = None, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + from_updated_at: typing.Optional[dt.datetime] = None, + to_updated_at: typing.Optional[dt.datetime] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PromptMetaListResponse]: + """ + Get a list of prompt names with versions and labels + + Parameters + ---------- + name : typing.Optional[str] + + label : typing.Optional[str] + + tag : typing.Optional[str] + + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + from_updated_at : typing.Optional[dt.datetime] + Optional filter to only include prompt versions created/updated on or after a certain datetime (ISO 8601) + + to_updated_at : typing.Optional[dt.datetime] + Optional filter to only include prompt versions created/updated before a certain datetime (ISO 8601) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[PromptMetaListResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/v2/prompts", + method="GET", + params={ + "name": name, + "label": label, + "tag": tag, + "page": page, + "limit": limit, + "fromUpdatedAt": serialize_datetime(from_updated_at) + if from_updated_at is not None + else None, + "toUpdatedAt": serialize_datetime(to_updated_at) + if to_updated_at is not None + else None, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptMetaListResponse, + parse_obj_as( + type_=PromptMetaListResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def create( + self, + *, + request: CreatePromptRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prompt]: + """ + Create a new version for the prompt with the given `name` + + Parameters + ---------- + request : CreatePromptRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Prompt] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/v2/prompts", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreatePromptRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code 
== 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawPromptsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, + prompt_name: str, + *, + version: typing.Optional[int] = None, + label: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prompt]: + """ + Get a prompt + + Parameters + ---------- + prompt_name : str + The name of the prompt + + version : typing.Optional[int] + Version of the prompt to be retrieved. + + label : typing.Optional[str] + Label of the prompt to be retrieved. Defaults to "production" if no label or version is set. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Prompt] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/v2/prompts/{jsonable_encoder(prompt_name)}", + method="GET", + params={ + "version": version, + "label": label, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def list( + self, + *, + name: typing.Optional[str] = None, + label: 
typing.Optional[str] = None, + tag: typing.Optional[str] = None, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + from_updated_at: typing.Optional[dt.datetime] = None, + to_updated_at: typing.Optional[dt.datetime] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PromptMetaListResponse]: + """ + Get a list of prompt names with versions and labels + + Parameters + ---------- + name : typing.Optional[str] + + label : typing.Optional[str] + + tag : typing.Optional[str] + + page : typing.Optional[int] + page number, starts at 1 + + limit : typing.Optional[int] + limit of items per page + + from_updated_at : typing.Optional[dt.datetime] + Optional filter to only include prompt versions created/updated on or after a certain datetime (ISO 8601) + + to_updated_at : typing.Optional[dt.datetime] + Optional filter to only include prompt versions created/updated before a certain datetime (ISO 8601) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[PromptMetaListResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/v2/prompts", + method="GET", + params={ + "name": name, + "label": label, + "tag": tag, + "page": page, + "limit": limit, + "fromUpdatedAt": serialize_datetime(from_updated_at) + if from_updated_at is not None + else None, + "toUpdatedAt": serialize_datetime(to_updated_at) + if to_updated_at is not None + else None, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptMetaListResponse, + parse_obj_as( + type_=PromptMetaListResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def create( + self, + *, + request: CreatePromptRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prompt]: + """ + Create a new version for the prompt with the given `name` + + Parameters + ---------- + request : CreatePromptRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Prompt] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/v2/prompts", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreatePromptRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) 
+ if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/prompts/types/__init__.py b/langfuse/api/resources/prompts/types/__init__.py index 6678ec262..441cb253e 100644 --- a/langfuse/api/resources/prompts/types/__init__.py +++ b/langfuse/api/resources/prompts/types/__init__.py @@ -1,33 +1,80 @@ # This file was auto-generated by Fern from our API Definition. -from .base_prompt import BasePrompt -from .chat_message import ChatMessage -from .chat_message_with_placeholders import ( - ChatMessageWithPlaceholders, - ChatMessageWithPlaceholders_Chatmessage, - ChatMessageWithPlaceholders_Placeholder, -) -from .chat_prompt import ChatPrompt -from .create_chat_prompt_request import CreateChatPromptRequest -from .create_prompt_request import ( - CreatePromptRequest, - CreatePromptRequest_Chat, - CreatePromptRequest_Text, -) -from .create_text_prompt_request import CreateTextPromptRequest -from .placeholder_message import PlaceholderMessage -from .prompt import Prompt, Prompt_Chat, Prompt_Text -from .prompt_meta import PromptMeta -from .prompt_meta_list_response import PromptMetaListResponse -from .prompt_type import PromptType -from .text_prompt import TextPrompt +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .base_prompt import BasePrompt + from .chat_message import ChatMessage + from .chat_message_with_placeholders import ChatMessageWithPlaceholders + from .chat_prompt import ChatPrompt + from .create_chat_prompt_request import 
CreateChatPromptRequest + from .create_prompt_request import ( + CreatePromptRequest, + CreatePromptRequest_Chat, + CreatePromptRequest_Text, + ) + from .create_text_prompt_request import CreateTextPromptRequest + from .placeholder_message import PlaceholderMessage + from .prompt import Prompt, Prompt_Chat, Prompt_Text + from .prompt_meta import PromptMeta + from .prompt_meta_list_response import PromptMetaListResponse + from .prompt_type import PromptType + from .text_prompt import TextPrompt +_dynamic_imports: typing.Dict[str, str] = { + "BasePrompt": ".base_prompt", + "ChatMessage": ".chat_message", + "ChatMessageWithPlaceholders": ".chat_message_with_placeholders", + "ChatPrompt": ".chat_prompt", + "CreateChatPromptRequest": ".create_chat_prompt_request", + "CreatePromptRequest": ".create_prompt_request", + "CreatePromptRequest_Chat": ".create_prompt_request", + "CreatePromptRequest_Text": ".create_prompt_request", + "CreateTextPromptRequest": ".create_text_prompt_request", + "PlaceholderMessage": ".placeholder_message", + "Prompt": ".prompt", + "PromptMeta": ".prompt_meta", + "PromptMetaListResponse": ".prompt_meta_list_response", + "PromptType": ".prompt_type", + "Prompt_Chat": ".prompt", + "Prompt_Text": ".prompt", + "TextPrompt": ".text_prompt", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + 
__all__ = [ "BasePrompt", "ChatMessage", "ChatMessageWithPlaceholders", - "ChatMessageWithPlaceholders_Chatmessage", - "ChatMessageWithPlaceholders_Placeholder", "ChatPrompt", "CreateChatPromptRequest", "CreatePromptRequest", diff --git a/langfuse/api/resources/prompts/types/base_prompt.py b/langfuse/api/resources/prompts/types/base_prompt.py index eff295cc5..3e945ad1a 100644 --- a/langfuse/api/resources/prompts/types/base_prompt.py +++ b/langfuse/api/resources/prompts/types/base_prompt.py @@ -1,69 +1,49 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class BasePrompt(pydantic_v1.BaseModel): +class BasePrompt(UniversalBaseModel): name: str version: int config: typing.Any - labels: typing.List[str] = pydantic_v1.Field() + labels: typing.List[str] = pydantic.Field() """ List of deployment labels of this prompt version. """ - tags: typing.List[str] = pydantic_v1.Field() + tags: typing.List[str] = pydantic.Field() """ List of tags. Used to filter via UI and API. The same across versions of a prompt. """ - commit_message: typing.Optional[str] = pydantic_v1.Field( - alias="commitMessage", default=None - ) + commit_message: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="commitMessage") + ] = pydantic.Field(default=None) """ Commit message for this prompt version. 
""" - resolution_graph: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field( - alias="resolutionGraph", default=None - ) + resolution_graph: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, typing.Any]], + FieldMetadata(alias="resolutionGraph"), + ] = pydantic.Field(default=None) """ The dependency resolution graph for the current prompt. Null if prompt has no dependencies. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/prompts/types/chat_message.py b/langfuse/api/resources/prompts/types/chat_message.py index d009bc8cf..9389cc24c 100644 --- a/langfuse/api/resources/prompts/types/chat_message.py +++ b/langfuse/api/resources/prompts/types/chat_message.py @@ -1,43 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ChatMessage(pydantic_v1.BaseModel): +class ChatMessage(UniversalBaseModel): role: str content: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/prompts/types/chat_message_with_placeholders.py b/langfuse/api/resources/prompts/types/chat_message_with_placeholders.py index dc12d5073..e077ca144 100644 --- a/langfuse/api/resources/prompts/types/chat_message_with_placeholders.py +++ b/langfuse/api/resources/prompts/types/chat_message_with_placeholders.py @@ -1,87 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
-from __future__ import annotations - -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - - -class ChatMessageWithPlaceholders_Chatmessage(pydantic_v1.BaseModel): - role: str - content: str - type: typing.Literal["chatmessage"] = "chatmessage" - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class ChatMessageWithPlaceholders_Placeholder(pydantic_v1.BaseModel): - name: str - type: typing.Literal["placeholder"] = "placeholder" - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra 
= pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - +from .chat_message import ChatMessage +from .placeholder_message import PlaceholderMessage -ChatMessageWithPlaceholders = typing.Union[ - ChatMessageWithPlaceholders_Chatmessage, ChatMessageWithPlaceholders_Placeholder -] +ChatMessageWithPlaceholders = typing.Union[ChatMessage, PlaceholderMessage] diff --git a/langfuse/api/resources/prompts/types/chat_prompt.py b/langfuse/api/resources/prompts/types/chat_prompt.py index 494449ea2..0d47e1a68 100644 --- a/langfuse/api/resources/prompts/types/chat_prompt.py +++ b/langfuse/api/resources/prompts/types/chat_prompt.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_prompt import BasePrompt from .chat_message_with_placeholders import ChatMessageWithPlaceholders @@ -12,35 +11,13 @@ class ChatPrompt(BasePrompt): prompt: typing.List[ChatMessageWithPlaceholders] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class 
Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/prompts/types/create_chat_prompt_request.py b/langfuse/api/resources/prompts/types/create_chat_prompt_request.py index 1442164a6..8d03703b5 100644 --- a/langfuse/api/resources/prompts/types/create_chat_prompt_request.py +++ b/langfuse/api/resources/prompts/types/create_chat_prompt_request.py @@ -1,63 +1,42 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .chat_message_with_placeholders import ChatMessageWithPlaceholders -class CreateChatPromptRequest(pydantic_v1.BaseModel): +class CreateChatPromptRequest(UniversalBaseModel): name: str prompt: typing.List[ChatMessageWithPlaceholders] config: typing.Optional[typing.Any] = None - labels: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) + labels: typing.Optional[typing.List[str]] = pydantic.Field(default=None) """ List of deployment labels of this prompt version. """ - tags: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) + tags: typing.Optional[typing.List[str]] = pydantic.Field(default=None) """ List of tags to apply to all versions of this prompt. 
""" - commit_message: typing.Optional[str] = pydantic_v1.Field( - alias="commitMessage", default=None - ) + commit_message: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="commitMessage") + ] = pydantic.Field(default=None) """ Commit message for this prompt version. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/prompts/types/create_prompt_request.py b/langfuse/api/resources/prompts/types/create_prompt_request.py index b9518a7c4..0881c5e0b 100644 --- a/langfuse/api/resources/prompts/types/create_prompt_request.py +++ b/langfuse/api/resources/prompts/types/create_prompt_request.py @@ -2,102 +2,62 @@ from __future__ import annotations -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from 
....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .chat_message_with_placeholders import ChatMessageWithPlaceholders -class CreatePromptRequest_Chat(pydantic_v1.BaseModel): +class CreatePromptRequest_Chat(UniversalBaseModel): + type: typing.Literal["chat"] = "chat" name: str prompt: typing.List[ChatMessageWithPlaceholders] config: typing.Optional[typing.Any] = None labels: typing.Optional[typing.List[str]] = None tags: typing.Optional[typing.List[str]] = None - commit_message: typing.Optional[str] = pydantic_v1.Field( - alias="commitMessage", default=None - ) - type: typing.Literal["chat"] = "chat" - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + commit_message: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="commitMessage") + ] = None - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - -class CreatePromptRequest_Text(pydantic_v1.BaseModel): +class 
CreatePromptRequest_Text(UniversalBaseModel): + type: typing.Literal["text"] = "text" name: str prompt: str config: typing.Optional[typing.Any] = None labels: typing.Optional[typing.List[str]] = None tags: typing.Optional[typing.List[str]] = None - commit_message: typing.Optional[str] = pydantic_v1.Field( - alias="commitMessage", default=None - ) - type: typing.Literal["text"] = "text" - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -CreatePromptRequest = typing.Union[CreatePromptRequest_Chat, CreatePromptRequest_Text] + commit_message: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="commitMessage") + ] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +CreatePromptRequest = typing_extensions.Annotated[ + typing.Union[CreatePromptRequest_Chat, CreatePromptRequest_Text], + pydantic.Field(discriminator="type"), +] diff --git a/langfuse/api/resources/prompts/types/create_text_prompt_request.py 
b/langfuse/api/resources/prompts/types/create_text_prompt_request.py index d35fbb24d..ffba8a6cd 100644 --- a/langfuse/api/resources/prompts/types/create_text_prompt_request.py +++ b/langfuse/api/resources/prompts/types/create_text_prompt_request.py @@ -1,62 +1,41 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class CreateTextPromptRequest(pydantic_v1.BaseModel): +class CreateTextPromptRequest(UniversalBaseModel): name: str prompt: str config: typing.Optional[typing.Any] = None - labels: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) + labels: typing.Optional[typing.List[str]] = pydantic.Field(default=None) """ List of deployment labels of this prompt version. """ - tags: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) + tags: typing.Optional[typing.List[str]] = pydantic.Field(default=None) """ List of tags to apply to all versions of this prompt. """ - commit_message: typing.Optional[str] = pydantic_v1.Field( - alias="commitMessage", default=None - ) + commit_message: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="commitMessage") + ] = pydantic.Field(default=None) """ Commit message for this prompt version. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/prompts/types/placeholder_message.py b/langfuse/api/resources/prompts/types/placeholder_message.py index a3352b391..c802a1607 100644 --- a/langfuse/api/resources/prompts/types/placeholder_message.py +++ b/langfuse/api/resources/prompts/types/placeholder_message.py @@ -1,42 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class PlaceholderMessage(pydantic_v1.BaseModel): +class PlaceholderMessage(UniversalBaseModel): name: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + type: str + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/prompts/types/prompt.py b/langfuse/api/resources/prompts/types/prompt.py index 1ad894879..999b9c536 100644 --- a/langfuse/api/resources/prompts/types/prompt.py +++ b/langfuse/api/resources/prompts/types/prompt.py @@ -2,110 +2,71 @@ from __future__ import annotations -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import 
IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .chat_message_with_placeholders import ChatMessageWithPlaceholders -class Prompt_Chat(pydantic_v1.BaseModel): +class Prompt_Chat(UniversalBaseModel): + type: typing.Literal["chat"] = "chat" prompt: typing.List[ChatMessageWithPlaceholders] name: str version: int config: typing.Any labels: typing.List[str] tags: typing.List[str] - commit_message: typing.Optional[str] = pydantic_v1.Field( - alias="commitMessage", default=None - ) - resolution_graph: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field( - alias="resolutionGraph", default=None - ) - type: typing.Literal["chat"] = "chat" - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class Prompt_Text(pydantic_v1.BaseModel): + commit_message: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="commitMessage") + ] = None + resolution_graph: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, typing.Any]], + FieldMetadata(alias="resolutionGraph"), + ] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: 
ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class Prompt_Text(UniversalBaseModel): + type: typing.Literal["text"] = "text" prompt: str name: str version: int config: typing.Any labels: typing.List[str] tags: typing.List[str] - commit_message: typing.Optional[str] = pydantic_v1.Field( - alias="commitMessage", default=None - ) - resolution_graph: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field( - alias="resolutionGraph", default=None - ) - type: typing.Literal["text"] = "text" - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -Prompt = typing.Union[Prompt_Chat, Prompt_Text] + commit_message: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="commitMessage") + ] = None + resolution_graph: typing_extensions.Annotated[ + typing.Optional[typing.Dict[str, typing.Any]], + FieldMetadata(alias="resolutionGraph"), + ] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = 
pydantic.Extra.allow + + +Prompt = typing_extensions.Annotated[ + typing.Union[Prompt_Chat, Prompt_Text], pydantic.Field(discriminator="type") +] diff --git a/langfuse/api/resources/prompts/types/prompt_meta.py b/langfuse/api/resources/prompts/types/prompt_meta.py index 35f8a06cf..3caeec1c8 100644 --- a/langfuse/api/resources/prompts/types/prompt_meta.py +++ b/langfuse/api/resources/prompts/types/prompt_meta.py @@ -3,14 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .prompt_type import PromptType -class PromptMeta(pydantic_v1.BaseModel): +class PromptMeta(UniversalBaseModel): name: str - type: PromptType = pydantic_v1.Field() + type: PromptType = pydantic.Field() """ Indicates whether the prompt is a text or chat prompt. 
""" @@ -18,41 +20,23 @@ class PromptMeta(pydantic_v1.BaseModel): versions: typing.List[int] labels: typing.List[str] tags: typing.List[str] - last_updated_at: dt.datetime = pydantic_v1.Field(alias="lastUpdatedAt") - last_config: typing.Any = pydantic_v1.Field(alias="lastConfig") + last_updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="lastUpdatedAt") + ] + last_config: typing_extensions.Annotated[ + typing.Any, FieldMetadata(alias="lastConfig") + ] = pydantic.Field() """ Config object of the most recent prompt version that matches the filters (if any are provided) """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/prompts/types/prompt_meta_list_response.py b/langfuse/api/resources/prompts/types/prompt_meta_list_response.py index d3dccf650..4acf636bb 100644 --- a/langfuse/api/resources/prompts/types/prompt_meta_list_response.py +++ 
b/langfuse/api/resources/prompts/types/prompt_meta_list_response.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...utils.resources.pagination.types.meta_response import MetaResponse from .prompt_meta import PromptMeta -class PromptMetaListResponse(pydantic_v1.BaseModel): +class PromptMetaListResponse(UniversalBaseModel): data: typing.List[PromptMeta] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/prompts/types/prompt_type.py b/langfuse/api/resources/prompts/types/prompt_type.py index 958d544a6..a6676ae50 100644 --- a/langfuse/api/resources/prompts/types/prompt_type.py +++ b/langfuse/api/resources/prompts/types/prompt_type.py @@ -1,19 +1,5 @@ 
# This file was auto-generated by Fern from our API Definition. -import enum import typing -T_Result = typing.TypeVar("T_Result") - - -class PromptType(str, enum.Enum): - CHAT = "chat" - TEXT = "text" - - def visit( - self, chat: typing.Callable[[], T_Result], text: typing.Callable[[], T_Result] - ) -> T_Result: - if self is PromptType.CHAT: - return chat() - if self is PromptType.TEXT: - return text() +PromptType = typing.Union[typing.Literal["chat", "text"], typing.Any] diff --git a/langfuse/api/resources/prompts/types/text_prompt.py b/langfuse/api/resources/prompts/types/text_prompt.py index e149ea322..1de5bbf39 100644 --- a/langfuse/api/resources/prompts/types/text_prompt.py +++ b/langfuse/api/resources/prompts/types/text_prompt.py @@ -1,45 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from .base_prompt import BasePrompt class TextPrompt(BasePrompt): prompt: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - 
smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/__init__.py b/langfuse/api/resources/scim/__init__.py index 29655a8da..6c4126f3c 100644 --- a/langfuse/api/resources/scim/__init__.py +++ b/langfuse/api/resources/scim/__init__.py @@ -1,24 +1,77 @@ # This file was auto-generated by Fern from our API Definition. -from .types import ( - AuthenticationScheme, - BulkConfig, - EmptyResponse, - FilterConfig, - ResourceMeta, - ResourceType, - ResourceTypesResponse, - SchemaExtension, - SchemaResource, - SchemasResponse, - ScimEmail, - ScimFeatureSupport, - ScimName, - ScimUser, - ScimUsersListResponse, - ServiceProviderConfig, - UserMeta, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + AuthenticationScheme, + BulkConfig, + EmptyResponse, + FilterConfig, + ResourceMeta, + ResourceType, + ResourceTypesResponse, + SchemaExtension, + SchemaResource, + SchemasResponse, + ScimEmail, + ScimFeatureSupport, + ScimName, + ScimUser, + ScimUsersListResponse, + ServiceProviderConfig, + UserMeta, + ) +_dynamic_imports: typing.Dict[str, str] = { + "AuthenticationScheme": ".types", + "BulkConfig": ".types", + "EmptyResponse": ".types", + "FilterConfig": ".types", + "ResourceMeta": ".types", + "ResourceType": ".types", + "ResourceTypesResponse": ".types", + "SchemaExtension": ".types", + "SchemaResource": ".types", + "SchemasResponse": ".types", + "ScimEmail": ".types", + "ScimFeatureSupport": ".types", + "ScimName": ".types", + "ScimUser": ".types", + "ScimUsersListResponse": ".types", + "ServiceProviderConfig": ".types", + "UserMeta": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is 
None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "AuthenticationScheme", diff --git a/langfuse/api/resources/scim/client.py b/langfuse/api/resources/scim/client.py index 38523a4f9..2dcf4e849 100644 --- a/langfuse/api/resources/scim/client.py +++ b/langfuse/api/resources/scim/client.py @@ -1,18 +1,10 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawScimClient, RawScimClient from .types.empty_response import EmptyResponse from .types.resource_types_response import ResourceTypesResponse from .types.schemas_response import SchemasResponse @@ -28,7 +20,18 @@ class ScimClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawScimClient(client_wrapper=client_wrapper) + + 
@property + def with_raw_response(self) -> RawScimClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawScimClient + """ + return self._raw_client def get_service_provider_config( self, *, request_options: typing.Optional[RequestOptions] = None @@ -47,7 +50,7 @@ def get_service_provider_config( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -59,36 +62,10 @@ def get_service_provider_config( ) client.scim.get_service_provider_config() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/scim/ServiceProviderConfig", - method="GET", - request_options=request_options, + _response = self._raw_client.get_service_provider_config( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ServiceProviderConfig, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_resource_types( self, *, request_options: typing.Optional[RequestOptions] 
= None @@ -107,7 +84,7 @@ def get_resource_types( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -119,36 +96,8 @@ def get_resource_types( ) client.scim.get_resource_types() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/scim/ResourceTypes", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ResourceTypesResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get_resource_types(request_options=request_options) + return _response.data def get_schemas( self, *, request_options: typing.Optional[RequestOptions] = None @@ -167,7 +116,7 @@ def get_schemas( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -179,34 +128,8 @@ def get_schemas( ) client.scim.get_schemas() """ - _response = self._client_wrapper.httpx_client.request( - 
"api/public/scim/Schemas", method="GET", request_options=request_options - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(SchemasResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get_schemas(request_options=request_options) + return _response.data def list_users( self, @@ -239,7 +162,7 @@ def list_users( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -251,37 +174,13 @@ def list_users( ) client.scim.list_users() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/scim/Users", - method="GET", - params={"filter": filter, "startIndex": start_index, "count": count}, + _response = self._raw_client.list_users( + filter=filter, + start_index=start_index, + count=count, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScimUsersListResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise 
Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create_user( self, @@ -322,8 +221,8 @@ def create_user( Examples -------- - from langfuse import ScimName - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.scim import ScimName client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -338,44 +237,15 @@ def create_user( name=ScimName(), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/scim/Users", - method="POST", - json={ - "userName": user_name, - "name": name, - "emails": emails, - "active": active, - "password": password, - }, + _response = self._raw_client.create_user( + user_name=user_name, + name=name, + emails=emails, + active=active, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScimUser, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - 
) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_user( self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -396,7 +266,7 @@ def get_user( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -410,36 +280,8 @@ def get_user( user_id="userId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/scim/Users/{jsonable_encoder(user_id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScimUser, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json 
= _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get_user(user_id, request_options=request_options) + return _response.data def delete_user( self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -460,7 +302,7 @@ def delete_user( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -474,41 +316,26 @@ def delete_user( user_id="userId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/scim/Users/{jsonable_encoder(user_id)}", - method="DELETE", - request_options=request_options, + _response = self._raw_client.delete_user( + user_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmptyResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncScimClient: def __init__(self, *, 
client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawScimClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawScimClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawScimClient + """ + return self._raw_client async def get_service_provider_config( self, *, request_options: typing.Optional[RequestOptions] = None @@ -529,7 +356,7 @@ async def get_service_provider_config( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -547,36 +374,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/scim/ServiceProviderConfig", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_service_provider_config( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ServiceProviderConfig, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) 
- raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_resource_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -597,7 +398,7 @@ async def get_resource_types( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -615,36 +416,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/scim/ResourceTypes", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_resource_types( + request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ResourceTypesResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_schemas( self, *, request_options: typing.Optional[RequestOptions] = None @@ -665,7 +440,7 @@ async def get_schemas( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from 
langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -683,34 +458,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/scim/Schemas", method="GET", request_options=request_options - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(SchemasResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get_schemas(request_options=request_options) + return _response.data async def list_users( self, @@ -745,7 +494,7 @@ async def list_users( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -763,37 +512,13 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/scim/Users", - method="GET", - params={"filter": filter, "startIndex": start_index, "count": count}, + _response = await self._raw_client.list_users( 
+ filter=filter, + start_index=start_index, + count=count, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScimUsersListResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create_user( self, @@ -836,8 +561,8 @@ async def create_user( -------- import asyncio - from langfuse import ScimName - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.scim import ScimName client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -858,44 +583,15 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/scim/Users", - method="POST", - json={ - "userName": user_name, - "name": name, - "emails": emails, - "active": active, - "password": password, - }, + _response = await self._raw_client.create_user( + user_name=user_name, + name=name, + emails=emails, + active=active, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= 
_response.status_code < 300: - return pydantic_v1.parse_obj_as(ScimUser, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_user( self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -918,7 +614,7 @@ async def get_user( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -938,36 +634,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/scim/Users/{jsonable_encoder(user_id)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_user( + user_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScimUser, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete_user( self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -990,7 +660,7 @@ async def delete_user( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1010,33 +680,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/scim/Users/{jsonable_encoder(user_id)}", - method="DELETE", - request_options=request_options, + _response = await self._raw_client.delete_user( + user_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmptyResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/scim/raw_client.py b/langfuse/api/resources/scim/raw_client.py new file mode 100644 index 000000000..799399f4d --- /dev/null +++ b/langfuse/api/resources/scim/raw_client.py @@ -0,0 +1,1528 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.empty_response import EmptyResponse +from .types.resource_types_response import ResourceTypesResponse +from .types.schemas_response import SchemasResponse +from .types.scim_email import ScimEmail +from .types.scim_name import ScimName +from .types.scim_user import ScimUser +from .types.scim_users_list_response import ScimUsersListResponse +from .types.service_provider_config import ServiceProviderConfig + +# this is used as the default value for optional 
parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawScimClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get_service_provider_config( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[ServiceProviderConfig]: + """ + Get SCIM Service Provider Configuration (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ServiceProviderConfig] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/scim/ServiceProviderConfig", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ServiceProviderConfig, + parse_obj_as( + type_=ServiceProviderConfig, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + 
typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_resource_types( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[ResourceTypesResponse]: + """ + Get SCIM Resource Types (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ResourceTypesResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/scim/ResourceTypes", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ResourceTypesResponse, + parse_obj_as( + type_=ResourceTypesResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + 
type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_schemas( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[SchemasResponse]: + """ + Get SCIM Schemas (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[SchemasResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/scim/Schemas", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + SchemasResponse, + parse_obj_as( + type_=SchemasResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if 
_response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def list_users( + self, + *, + filter: typing.Optional[str] = None, + start_index: typing.Optional[int] = None, + count: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ScimUsersListResponse]: + """ + List users in the organization (requires organization-scoped API key) + + Parameters + ---------- + filter : typing.Optional[str] + Filter expression (e.g. userName eq "value") + + start_index : typing.Optional[int] + 1-based index of the first result to return (default 1) + + count : typing.Optional[int] + Maximum number of results to return (default 100) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ScimUsersListResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/scim/Users", + method="GET", + params={ + "filter": filter, + "startIndex": start_index, + "count": count, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScimUsersListResponse, + parse_obj_as( + type_=ScimUsersListResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def create_user( + self, + *, + user_name: str, + 
name: ScimName, + emails: typing.Optional[typing.Sequence[ScimEmail]] = OMIT, + active: typing.Optional[bool] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ScimUser]: + """ + Create a new user in the organization (requires organization-scoped API key) + + Parameters + ---------- + user_name : str + User's email address (required) + + name : ScimName + User's name information + + emails : typing.Optional[typing.Sequence[ScimEmail]] + User's email addresses + + active : typing.Optional[bool] + Whether the user is active + + password : typing.Optional[str] + Initial password for the user + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ScimUser] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/scim/Users", + method="POST", + json={ + "userName": user_name, + "name": convert_and_respect_annotation_metadata( + object_=name, annotation=ScimName, direction="write" + ), + "emails": convert_and_respect_annotation_metadata( + object_=emails, + annotation=typing.Sequence[ScimEmail], + direction="write", + ), + "active": active, + "password": password, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScimUser, + parse_obj_as( + type_=ScimUser, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + 
raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_user( + self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[ScimUser]: + """ + Get a specific user by ID (requires organization-scoped API key) + + Parameters + ---------- + user_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ScimUser] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/scim/Users/{jsonable_encoder(user_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScimUser, + parse_obj_as( + type_=ScimUser, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete_user( + self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[EmptyResponse]: + """ + 
Remove a user from the organization (requires organization-scoped API key). Note that this only removes the user from the organization but does not delete the user entity itself. + + Parameters + ---------- + user_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[EmptyResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/scim/Users/{jsonable_encoder(user_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + EmptyResponse, + parse_obj_as( + type_=EmptyResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawScimClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get_service_provider_config( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ServiceProviderConfig]: + """ + Get SCIM Service Provider Configuration (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ServiceProviderConfig] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/scim/ServiceProviderConfig", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ServiceProviderConfig, + parse_obj_as( + type_=ServiceProviderConfig, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: 
ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_resource_types( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ResourceTypesResponse]: + """ + Get SCIM Resource Types (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ResourceTypesResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/scim/ResourceTypes", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ResourceTypesResponse, + parse_obj_as( + type_=ResourceTypesResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_schemas( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[SchemasResponse]: + """ + Get SCIM Schemas (requires organization-scoped API key) + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[SchemasResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/scim/Schemas", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + SchemasResponse, + parse_obj_as( + type_=SchemasResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def list_users( + self, + *, + filter: typing.Optional[str] = None, + start_index: typing.Optional[int] = None, + count: 
typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ScimUsersListResponse]: + """ + List users in the organization (requires organization-scoped API key) + + Parameters + ---------- + filter : typing.Optional[str] + Filter expression (e.g. userName eq "value") + + start_index : typing.Optional[int] + 1-based index of the first result to return (default 1) + + count : typing.Optional[int] + Maximum number of results to return (default 100) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ScimUsersListResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/scim/Users", + method="GET", + params={ + "filter": filter, + "startIndex": start_index, + "count": count, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScimUsersListResponse, + parse_obj_as( + type_=ScimUsersListResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def create_user( + self, + *, + user_name: str, + name: ScimName, + emails: typing.Optional[typing.Sequence[ScimEmail]] = OMIT, + active: typing.Optional[bool] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ScimUser]: + """ + Create a new user in the organization (requires organization-scoped API key) + + Parameters + ---------- + user_name : str + User's email address (required) + + name : ScimName + User's name information + + emails : typing.Optional[typing.Sequence[ScimEmail]] + User's email addresses + + active : typing.Optional[bool] + Whether the user is active + + password : typing.Optional[str] + Initial password for the user + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ScimUser] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/scim/Users", + method="POST", + json={ + "userName": user_name, + "name": convert_and_respect_annotation_metadata( + object_=name, annotation=ScimName, direction="write" + ), + "emails": convert_and_respect_annotation_metadata( + object_=emails, + annotation=typing.Sequence[ScimEmail], + direction="write", + ), + "active": active, + "password": password, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScimUser, + parse_obj_as( + type_=ScimUser, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + 
status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_user( + self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ScimUser]: + """ + Get a specific user by ID (requires organization-scoped API key) + + Parameters + ---------- + user_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ScimUser] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/scim/Users/{jsonable_encoder(user_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScimUser, + parse_obj_as( + type_=ScimUser, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + 
headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def delete_user( + self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[EmptyResponse]: + """ + Remove a user from the organization (requires organization-scoped API key). Note that this only removes the user from the organization but does not delete the user entity itself. + + Parameters + ---------- + user_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[EmptyResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/scim/Users/{jsonable_encoder(user_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + EmptyResponse, + parse_obj_as( + type_=EmptyResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + 
type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/scim/types/__init__.py b/langfuse/api/resources/scim/types/__init__.py index c0b60e8c2..9d6483e3d 100644 --- a/langfuse/api/resources/scim/types/__init__.py +++ b/langfuse/api/resources/scim/types/__init__.py @@ -1,22 +1,75 @@ # This file was auto-generated by Fern from our API Definition. 
-from .authentication_scheme import AuthenticationScheme -from .bulk_config import BulkConfig -from .empty_response import EmptyResponse -from .filter_config import FilterConfig -from .resource_meta import ResourceMeta -from .resource_type import ResourceType -from .resource_types_response import ResourceTypesResponse -from .schema_extension import SchemaExtension -from .schema_resource import SchemaResource -from .schemas_response import SchemasResponse -from .scim_email import ScimEmail -from .scim_feature_support import ScimFeatureSupport -from .scim_name import ScimName -from .scim_user import ScimUser -from .scim_users_list_response import ScimUsersListResponse -from .service_provider_config import ServiceProviderConfig -from .user_meta import UserMeta +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .authentication_scheme import AuthenticationScheme + from .bulk_config import BulkConfig + from .empty_response import EmptyResponse + from .filter_config import FilterConfig + from .resource_meta import ResourceMeta + from .resource_type import ResourceType + from .resource_types_response import ResourceTypesResponse + from .schema_extension import SchemaExtension + from .schema_resource import SchemaResource + from .schemas_response import SchemasResponse + from .scim_email import ScimEmail + from .scim_feature_support import ScimFeatureSupport + from .scim_name import ScimName + from .scim_user import ScimUser + from .scim_users_list_response import ScimUsersListResponse + from .service_provider_config import ServiceProviderConfig + from .user_meta import UserMeta +_dynamic_imports: typing.Dict[str, str] = { + "AuthenticationScheme": ".authentication_scheme", + "BulkConfig": ".bulk_config", + "EmptyResponse": ".empty_response", + "FilterConfig": ".filter_config", + "ResourceMeta": ".resource_meta", + "ResourceType": ".resource_type", + "ResourceTypesResponse": ".resource_types_response", + 
"SchemaExtension": ".schema_extension", + "SchemaResource": ".schema_resource", + "SchemasResponse": ".schemas_response", + "ScimEmail": ".scim_email", + "ScimFeatureSupport": ".scim_feature_support", + "ScimName": ".scim_name", + "ScimUser": ".scim_user", + "ScimUsersListResponse": ".scim_users_list_response", + "ServiceProviderConfig": ".service_provider_config", + "UserMeta": ".user_meta", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "AuthenticationScheme", diff --git a/langfuse/api/resources/scim/types/authentication_scheme.py b/langfuse/api/resources/scim/types/authentication_scheme.py index 6d6526901..2d8650f84 100644 --- a/langfuse/api/resources/scim/types/authentication_scheme.py +++ b/langfuse/api/resources/scim/types/authentication_scheme.py @@ -1,48 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class AuthenticationScheme(pydantic_v1.BaseModel): +class AuthenticationScheme(UniversalBaseModel): name: str description: str - spec_uri: str = pydantic_v1.Field(alias="specUri") + spec_uri: typing_extensions.Annotated[str, FieldMetadata(alias="specUri")] type: str primary: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/bulk_config.py b/langfuse/api/resources/scim/types/bulk_config.py index 0b41af5cf..4522ae41b 100644 --- a/langfuse/api/resources/scim/types/bulk_config.py +++ b/langfuse/api/resources/scim/types/bulk_config.py @@ -1,46 +1,29 @@ # 
This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class BulkConfig(pydantic_v1.BaseModel): +class BulkConfig(UniversalBaseModel): supported: bool - max_operations: int = pydantic_v1.Field(alias="maxOperations") - max_payload_size: int = pydantic_v1.Field(alias="maxPayloadSize") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + max_operations: typing_extensions.Annotated[ + int, FieldMetadata(alias="maxOperations") + ] + max_payload_size: typing_extensions.Annotated[ + int, FieldMetadata(alias="maxPayloadSize") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/empty_response.py 
b/langfuse/api/resources/scim/types/empty_response.py index 82105e8a3..1e68eb590 100644 --- a/langfuse/api/resources/scim/types/empty_response.py +++ b/langfuse/api/resources/scim/types/empty_response.py @@ -1,44 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class EmptyResponse(pydantic_v1.BaseModel): +class EmptyResponse(UniversalBaseModel): """ Empty response for 204 No Content responses """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/filter_config.py b/langfuse/api/resources/scim/types/filter_config.py index 2bd035867..e4a7954ad 100644 --- a/langfuse/api/resources/scim/types/filter_config.py +++ b/langfuse/api/resources/scim/types/filter_config.py @@ -1,45 
+1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class FilterConfig(pydantic_v1.BaseModel): +class FilterConfig(UniversalBaseModel): supported: bool - max_results: int = pydantic_v1.Field(alias="maxResults") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + max_results: typing_extensions.Annotated[int, FieldMetadata(alias="maxResults")] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/resource_meta.py b/langfuse/api/resources/scim/types/resource_meta.py index a61d14442..9c2549ca8 100644 --- a/langfuse/api/resources/scim/types/resource_meta.py +++ 
b/langfuse/api/resources/scim/types/resource_meta.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class ResourceMeta(pydantic_v1.BaseModel): - resource_type: str = pydantic_v1.Field(alias="resourceType") +class ResourceMeta(UniversalBaseModel): + resource_type: typing_extensions.Annotated[str, FieldMetadata(alias="resourceType")] location: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/resource_type.py b/langfuse/api/resources/scim/types/resource_type.py index 264dc87cf..803aa48b0 100644 --- 
a/langfuse/api/resources/scim/types/resource_type.py +++ b/langfuse/api/resources/scim/types/resource_type.py @@ -1,55 +1,34 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .resource_meta import ResourceMeta from .schema_extension import SchemaExtension -class ResourceType(pydantic_v1.BaseModel): +class ResourceType(UniversalBaseModel): schemas: typing.Optional[typing.List[str]] = None id: str name: str endpoint: str description: str - schema_: str = pydantic_v1.Field(alias="schema") - schema_extensions: typing.List[SchemaExtension] = pydantic_v1.Field( - alias="schemaExtensions" - ) + schema_: typing_extensions.Annotated[str, FieldMetadata(alias="schema")] + schema_extensions: typing_extensions.Annotated[ + typing.List[SchemaExtension], FieldMetadata(alias="schemaExtensions") + ] meta: ResourceMeta - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class 
Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/resource_types_response.py b/langfuse/api/resources/scim/types/resource_types_response.py index cce65b8d1..a3f7bea63 100644 --- a/langfuse/api/resources/scim/types/resource_types_response.py +++ b/langfuse/api/resources/scim/types/resource_types_response.py @@ -1,47 +1,28 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .resource_type import ResourceType -class ResourceTypesResponse(pydantic_v1.BaseModel): +class ResourceTypesResponse(UniversalBaseModel): schemas: typing.List[str] - total_results: int = pydantic_v1.Field(alias="totalResults") - resources: typing.List[ResourceType] = pydantic_v1.Field(alias="Resources") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - 
smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + total_results: typing_extensions.Annotated[int, FieldMetadata(alias="totalResults")] + resources: typing_extensions.Annotated[ + typing.List[ResourceType], FieldMetadata(alias="Resources") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/schema_extension.py b/langfuse/api/resources/scim/types/schema_extension.py index c5ede44b9..a6a8c1cc8 100644 --- a/langfuse/api/resources/scim/types/schema_extension.py +++ b/langfuse/api/resources/scim/types/schema_extension.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class SchemaExtension(pydantic_v1.BaseModel): - schema_: str = pydantic_v1.Field(alias="schema") +class SchemaExtension(UniversalBaseModel): + schema_: typing_extensions.Annotated[str, FieldMetadata(alias="schema")] required: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - 
kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/schema_resource.py b/langfuse/api/resources/scim/types/schema_resource.py index e85cda9a0..bbfca86c9 100644 --- a/langfuse/api/resources/scim/types/schema_resource.py +++ b/langfuse/api/resources/scim/types/schema_resource.py @@ -1,47 +1,26 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .resource_meta import ResourceMeta -class SchemaResource(pydantic_v1.BaseModel): +class SchemaResource(UniversalBaseModel): id: str name: str description: str attributes: typing.List[typing.Any] meta: ResourceMeta - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/schemas_response.py b/langfuse/api/resources/scim/types/schemas_response.py index 4c7b8199a..65c3b4411 100644 --- a/langfuse/api/resources/scim/types/schemas_response.py +++ b/langfuse/api/resources/scim/types/schemas_response.py @@ -1,47 +1,28 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .schema_resource import SchemaResource -class SchemasResponse(pydantic_v1.BaseModel): +class SchemasResponse(UniversalBaseModel): schemas: typing.List[str] - total_results: int = pydantic_v1.Field(alias="totalResults") - resources: typing.List[SchemaResource] = pydantic_v1.Field(alias="Resources") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + total_results: typing_extensions.Annotated[int, FieldMetadata(alias="totalResults")] + resources: typing_extensions.Annotated[ + typing.List[SchemaResource], FieldMetadata(alias="Resources") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git 
a/langfuse/api/resources/scim/types/scim_email.py b/langfuse/api/resources/scim/types/scim_email.py index 71b817809..361219410 100644 --- a/langfuse/api/resources/scim/types/scim_email.py +++ b/langfuse/api/resources/scim/types/scim_email.py @@ -1,44 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ScimEmail(pydantic_v1.BaseModel): +class ScimEmail(UniversalBaseModel): primary: bool value: str type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/scim_feature_support.py b/langfuse/api/resources/scim/types/scim_feature_support.py index 2aedc07b5..50a79a155 100644 --- a/langfuse/api/resources/scim/types/scim_feature_support.py +++ 
b/langfuse/api/resources/scim/types/scim_feature_support.py @@ -1,42 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ScimFeatureSupport(pydantic_v1.BaseModel): +class ScimFeatureSupport(UniversalBaseModel): supported: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/scim_name.py b/langfuse/api/resources/scim/types/scim_name.py index c2812a25a..7a0f4091d 100644 --- a/langfuse/api/resources/scim/types/scim_name.py +++ b/langfuse/api/resources/scim/types/scim_name.py @@ -1,42 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ScimName(pydantic_v1.BaseModel): +class ScimName(UniversalBaseModel): formatted: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/scim_user.py b/langfuse/api/resources/scim/types/scim_user.py index 581bab8c1..f3e3d4426 100644 --- a/langfuse/api/resources/scim/types/scim_user.py +++ b/langfuse/api/resources/scim/types/scim_user.py @@ -1,52 +1,31 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .scim_email import ScimEmail from .scim_name import ScimName from .user_meta import UserMeta -class ScimUser(pydantic_v1.BaseModel): +class ScimUser(UniversalBaseModel): schemas: typing.List[str] id: str - user_name: str = pydantic_v1.Field(alias="userName") + user_name: typing_extensions.Annotated[str, FieldMetadata(alias="userName")] name: ScimName emails: typing.List[ScimEmail] meta: UserMeta - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/scim_users_list_response.py b/langfuse/api/resources/scim/types/scim_users_list_response.py index 
3c41a4d16..a848871b9 100644 --- a/langfuse/api/resources/scim/types/scim_users_list_response.py +++ b/langfuse/api/resources/scim/types/scim_users_list_response.py @@ -1,49 +1,32 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .scim_user import ScimUser -class ScimUsersListResponse(pydantic_v1.BaseModel): +class ScimUsersListResponse(UniversalBaseModel): schemas: typing.List[str] - total_results: int = pydantic_v1.Field(alias="totalResults") - start_index: int = pydantic_v1.Field(alias="startIndex") - items_per_page: int = pydantic_v1.Field(alias="itemsPerPage") - resources: typing.List[ScimUser] = pydantic_v1.Field(alias="Resources") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + total_results: typing_extensions.Annotated[int, FieldMetadata(alias="totalResults")] + start_index: typing_extensions.Annotated[int, 
FieldMetadata(alias="startIndex")] + items_per_page: typing_extensions.Annotated[ + int, FieldMetadata(alias="itemsPerPage") + ] + resources: typing_extensions.Annotated[ + typing.List[ScimUser], FieldMetadata(alias="Resources") + ] + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/service_provider_config.py b/langfuse/api/resources/scim/types/service_provider_config.py index 9bf611ae6..4f28de773 100644 --- a/langfuse/api/resources/scim/types/service_provider_config.py +++ b/langfuse/api/resources/scim/types/service_provider_config.py @@ -1,10 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from .authentication_scheme import AuthenticationScheme from .bulk_config import BulkConfig from .filter_config import FilterConfig @@ -12,49 +13,31 @@ from .scim_feature_support import ScimFeatureSupport -class ServiceProviderConfig(pydantic_v1.BaseModel): +class ServiceProviderConfig(UniversalBaseModel): schemas: typing.List[str] - documentation_uri: str = pydantic_v1.Field(alias="documentationUri") + documentation_uri: typing_extensions.Annotated[ + str, FieldMetadata(alias="documentationUri") + ] patch: ScimFeatureSupport bulk: BulkConfig filter: FilterConfig - change_password: ScimFeatureSupport = pydantic_v1.Field(alias="changePassword") + change_password: typing_extensions.Annotated[ + ScimFeatureSupport, FieldMetadata(alias="changePassword") + ] sort: 
ScimFeatureSupport etag: ScimFeatureSupport - authentication_schemes: typing.List[AuthenticationScheme] = pydantic_v1.Field( - alias="authenticationSchemes" - ) + authentication_schemes: typing_extensions.Annotated[ + typing.List[AuthenticationScheme], FieldMetadata(alias="authenticationSchemes") + ] meta: ResourceMeta - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/scim/types/user_meta.py b/langfuse/api/resources/scim/types/user_meta.py index 09cb7e6a0..80c839253 100644 --- a/langfuse/api/resources/scim/types/user_meta.py +++ b/langfuse/api/resources/scim/types/user_meta.py @@ -1,48 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class UserMeta(pydantic_v1.BaseModel): - resource_type: str = pydantic_v1.Field(alias="resourceType") +class UserMeta(UniversalBaseModel): + resource_type: typing_extensions.Annotated[str, FieldMetadata(alias="resourceType")] created: typing.Optional[str] = None - last_modified: typing.Optional[str] = pydantic_v1.Field( - alias="lastModified", default=None - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + last_modified: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="lastModified") + ] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/score/__init__.py 
b/langfuse/api/resources/score/__init__.py index 566310af3..3d0c7422a 100644 --- a/langfuse/api/resources/score/__init__.py +++ b/langfuse/api/resources/score/__init__.py @@ -1,5 +1,43 @@ # This file was auto-generated by Fern from our API Definition. -from .types import CreateScoreRequest, CreateScoreResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import CreateScoreRequest, CreateScoreResponse +_dynamic_imports: typing.Dict[str, str] = { + "CreateScoreRequest": ".types", + "CreateScoreResponse": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["CreateScoreRequest", "CreateScoreResponse"] diff --git a/langfuse/api/resources/score/client.py b/langfuse/api/resources/score/client.py index 0c259929f..bb6be1fa8 100644 --- a/langfuse/api/resources/score/client.py +++ b/langfuse/api/resources/score/client.py @@ -1,18 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError +from .raw_client import AsyncRawScoreClient, RawScoreClient from .types.create_score_request import CreateScoreRequest from .types.create_score_response import CreateScoreResponse @@ -22,7 +14,18 @@ class ScoreClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawScoreClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawScoreClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawScoreClient + """ + return self._raw_client def create( self, @@ -46,8 +49,8 @@ def create( Examples -------- - from langfuse import CreateScoreRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.score import CreateScoreRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -64,38 +67,10 @@ def create( ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/scores", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreateScoreResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete( self, score_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -117,7 +92,7 @@ def delete( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( 
x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -131,41 +106,24 @@ def delete( score_id="scoreId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/scores/{jsonable_encoder(score_id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(score_id, request_options=request_options) + return _response.data class AsyncScoreClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawScoreClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawScoreClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawScoreClient + """ + return self._raw_client async def create( self, @@ -191,8 +149,8 @@ async def create( -------- import asyncio - from langfuse import CreateScoreRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.score import CreateScoreRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -215,38 +173,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/scores", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreateScoreResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete( self, score_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -270,7 +200,7 @@ async def delete( -------- import asyncio - from langfuse.client import 
AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -290,33 +220,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/scores/{jsonable_encoder(score_id)}", - method="DELETE", - request_options=request_options, + _response = await self._raw_client.delete( + score_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/score/raw_client.py b/langfuse/api/resources/score/raw_client.py new file mode 100644 index 000000000..981b8d3d6 --- /dev/null +++ b/langfuse/api/resources/score/raw_client.py @@ -0,0 +1,434 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from .types.create_score_request import CreateScoreRequest +from .types.create_score_response import CreateScoreResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawScoreClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def create( + self, + *, + request: CreateScoreRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[CreateScoreResponse]: + """ + Create a score (supports both trace and session scores) + + Parameters + ---------- + request : CreateScoreRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[CreateScoreResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/scores", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateScoreRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + CreateScoreResponse, + parse_obj_as( + type_=CreateScoreResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete( 
+ self, score_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + Delete a score (supports both trace and session scores) + + Parameters + ---------- + score_id : str + The unique langfuse identifier of a score + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/scores/{jsonable_encoder(score_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + 
status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawScoreClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def create( + self, + *, + request: CreateScoreRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[CreateScoreResponse]: + """ + Create a score (supports both trace and session scores) + + Parameters + ---------- + request : CreateScoreRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[CreateScoreResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/scores", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateScoreRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + CreateScoreResponse, + parse_obj_as( + type_=CreateScoreResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + 
typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def delete( + self, score_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a score (supports both trace and session scores) + + Parameters + ---------- + score_id : str + The unique langfuse identifier of a score + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/scores/{jsonable_encoder(score_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if 
_response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/score/types/__init__.py b/langfuse/api/resources/score/types/__init__.py index 72d61f6f3..4a759a978 100644 --- a/langfuse/api/resources/score/types/__init__.py +++ b/langfuse/api/resources/score/types/__init__.py @@ -1,6 +1,44 @@ # This file was auto-generated by Fern from our API Definition. 
-from .create_score_request import CreateScoreRequest -from .create_score_response import CreateScoreResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .create_score_request import CreateScoreRequest + from .create_score_response import CreateScoreResponse +_dynamic_imports: typing.Dict[str, str] = { + "CreateScoreRequest": ".create_score_request", + "CreateScoreResponse": ".create_score_response", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["CreateScoreRequest", "CreateScoreResponse"] diff --git a/langfuse/api/resources/score/types/create_score_request.py b/langfuse/api/resources/score/types/create_score_request.py index 1f79f4a64..2f6f70a0e 100644 --- a/langfuse/api/resources/score/types/create_score_request.py +++ b/langfuse/api/resources/score/types/create_score_request.py @@ -1,19 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from ...commons.types.create_score_value import CreateScoreValue from ...commons.types.score_data_type import ScoreDataType -class CreateScoreRequest(pydantic_v1.BaseModel): +class CreateScoreRequest(UniversalBaseModel): """ Examples -------- - from langfuse import CreateScoreRequest + from langfuse.resources.score import CreateScoreRequest CreateScoreRequest( name="novelty", @@ -23,75 +24,59 @@ class CreateScoreRequest(pydantic_v1.BaseModel): """ id: typing.Optional[str] = None - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) - session_id: typing.Optional[str] = pydantic_v1.Field( - alias="sessionId", default=None - ) - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - dataset_run_id: typing.Optional[str] = pydantic_v1.Field( - alias="datasetRunId", default=None - ) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = None + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + dataset_run_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="datasetRunId") + ] = None name: str - value: CreateScoreValue = pydantic_v1.Field() + value: CreateScoreValue = pydantic.Field() """ The value of the score. Must be passed as string for categorical scores, and numeric for boolean and numeric scores. 
Boolean score values must equal either 1 or 0 (true or false) """ comment: typing.Optional[str] = None metadata: typing.Optional[typing.Dict[str, typing.Any]] = None - environment: typing.Optional[str] = pydantic_v1.Field(default=None) + environment: typing.Optional[str] = pydantic.Field(default=None) """ The environment of the score. Can be any lowercase alphanumeric string with hyphens and underscores that does not start with 'langfuse'. """ - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) + queue_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="queueId") + ] = pydantic.Field(default=None) """ The annotation queue referenced by the score. Indicates if score was initially created while processing annotation queue. """ - data_type: typing.Optional[ScoreDataType] = pydantic_v1.Field( - alias="dataType", default=None - ) + data_type: typing_extensions.Annotated[ + typing.Optional[ScoreDataType], FieldMetadata(alias="dataType") + ] = pydantic.Field(default=None) """ The data type of the score. When passing a configId this field is inferred. Otherwise, this field must be passed or will default to numeric. """ - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) + config_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="configId") + ] = pydantic.Field(default=None) """ Reference a score config on a score. The unique langfuse identifier of a score config. When passing this field, the dataType and stringValue fields are automatically populated. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/score/types/create_score_response.py b/langfuse/api/resources/score/types/create_score_response.py index a8c90fce2..d61869480 100644 --- a/langfuse/api/resources/score/types/create_score_response.py +++ b/langfuse/api/resources/score/types/create_score_response.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class CreateScoreResponse(pydantic_v1.BaseModel): - id: str = pydantic_v1.Field() +class CreateScoreResponse(UniversalBaseModel): + id: str = pydantic.Field() """ The id of the created object in Langfuse """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/score_configs/__init__.py b/langfuse/api/resources/score_configs/__init__.py index da401d35d..16f409522 100644 --- a/langfuse/api/resources/score_configs/__init__.py +++ b/langfuse/api/resources/score_configs/__init__.py @@ -1,5 +1,44 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import CreateScoreConfigRequest, ScoreConfigs, UpdateScoreConfigRequest +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import CreateScoreConfigRequest, ScoreConfigs, UpdateScoreConfigRequest +_dynamic_imports: typing.Dict[str, str] = { + "CreateScoreConfigRequest": ".types", + "ScoreConfigs": ".types", + "UpdateScoreConfigRequest": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["CreateScoreConfigRequest", "ScoreConfigs", "UpdateScoreConfigRequest"] diff --git a/langfuse/api/resources/score_configs/client.py b/langfuse/api/resources/score_configs/client.py index 7faea8312..a771cde68 100644 --- a/langfuse/api/resources/score_configs/client.py +++ b/langfuse/api/resources/score_configs/client.py @@ -1,19 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..commons.types.score_config import ScoreConfig +from .raw_client import AsyncRawScoreConfigsClient, RawScoreConfigsClient from .types.create_score_config_request import CreateScoreConfigRequest from .types.score_configs import ScoreConfigs from .types.update_score_config_request import UpdateScoreConfigRequest @@ -24,7 +16,18 @@ class ScoreConfigsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawScoreConfigsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawScoreConfigsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawScoreConfigsClient + """ + return self._raw_client def create( self, @@ -48,8 +51,8 @@ def create( Examples -------- - from langfuse import CreateScoreConfigRequest, ScoreDataType - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.score_configs import CreateScoreConfigRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -62,42 +65,14 @@ def create( client.score_configs.create( request=CreateScoreConfigRequest( name="name", - data_type=ScoreDataType.NUMERIC, + data_type="NUMERIC", ), ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/score-configs", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScoreConfig, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get( self, @@ -126,7 +101,7 @@ def get( Examples -------- - from langfuse.client import 
FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -138,37 +113,10 @@ def get( ) client.score_configs.get() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/score-configs", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = self._raw_client.get( + page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScoreConfigs, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_by_id( self, config_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -190,7 +138,7 @@ def get_by_id( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -204,36 +152,10 @@ def get_by_id( config_id="configId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/score-configs/{jsonable_encoder(config_id)}", - method="GET", - 
request_options=request_options, + _response = self._raw_client.get_by_id( + config_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScoreConfig, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def update( self, @@ -261,8 +183,8 @@ def update( Examples -------- - from langfuse import UpdateScoreConfigRequest - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse + from langfuse.resources.score_configs import UpdateScoreConfigRequest client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -277,43 +199,26 @@ def update( request=UpdateScoreConfigRequest(), ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/score-configs/{jsonable_encoder(config_id)}", - method="PATCH", - json=request, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.update( + config_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScoreConfig, _response.json()) # type: ignore 
- if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncScoreConfigsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawScoreConfigsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawScoreConfigsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawScoreConfigsClient + """ + return self._raw_client async def create( self, @@ -339,8 +244,8 @@ async def create( -------- import asyncio - from langfuse import CreateScoreConfigRequest, ScoreDataType - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.score_configs import CreateScoreConfigRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -356,45 +261,17 @@ async def main() -> None: await client.score_configs.create( request=CreateScoreConfigRequest( name="name", - data_type=ScoreDataType.NUMERIC, + data_type="NUMERIC", ), ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/score-configs", - method="POST", - json=request, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScoreConfig, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def 
get( self, @@ -425,7 +302,7 @@ async def get( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -443,37 +320,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/score-configs", - method="GET", - params={"page": page, "limit": limit}, - request_options=request_options, + _response = await self._raw_client.get( + page=page, limit=limit, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScoreConfigs, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_by_id( self, config_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -497,7 +347,7 @@ async def get_by_id( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -517,36 +367,10 @@ async def main() 
-> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/score-configs/{jsonable_encoder(config_id)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_by_id( + config_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScoreConfig, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def update( self, @@ -576,8 +400,8 @@ async def update( -------- import asyncio - from langfuse import UpdateScoreConfigRequest - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse + from langfuse.resources.score_configs import UpdateScoreConfigRequest client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -598,35 +422,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/score-configs/{jsonable_encoder(config_id)}", - method="PATCH", - json=request, - request_options=request_options, - omit=OMIT, + _response = 
await self._raw_client.update( + config_id, request=request, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScoreConfig, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/score_configs/raw_client.py b/langfuse/api/resources/score_configs/raw_client.py new file mode 100644 index 000000000..53b4312ad --- /dev/null +++ b/langfuse/api/resources/score_configs/raw_client.py @@ -0,0 +1,896 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..commons.types.score_config import ScoreConfig +from .types.create_score_config_request import CreateScoreConfigRequest +from .types.score_configs import ScoreConfigs +from .types.update_score_config_request import UpdateScoreConfigRequest + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawScoreConfigsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def create( + self, + *, + request: CreateScoreConfigRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ScoreConfig]: + """ + Create a score configuration (config). Score configs are used to define the structure of scores + + Parameters + ---------- + request : CreateScoreConfigRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ScoreConfig] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/score-configs", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateScoreConfigRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScoreConfig, + parse_obj_as( + type_=ScoreConfig, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get( + self, + *, + 
page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ScoreConfigs]: + """ + Get all score configs + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1. + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ScoreConfigs] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/score-configs", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScoreConfigs, + parse_obj_as( + type_=ScoreConfigs, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + 
typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_by_id( + self, config_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[ScoreConfig]: + """ + Get a score config + + Parameters + ---------- + config_id : str + The unique langfuse identifier of a score config + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ScoreConfig] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/score-configs/{jsonable_encoder(config_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScoreConfig, + parse_obj_as( + type_=ScoreConfig, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + 
typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def update( + self, + config_id: str, + *, + request: UpdateScoreConfigRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ScoreConfig]: + """ + Update a score config + + Parameters + ---------- + config_id : str + The unique langfuse identifier of a score config + + request : UpdateScoreConfigRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ScoreConfig] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/score-configs/{jsonable_encoder(config_id)}", + method="PATCH", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=UpdateScoreConfigRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScoreConfig, + parse_obj_as( + type_=ScoreConfig, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, 
+ ) + + +class AsyncRawScoreConfigsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def create( + self, + *, + request: CreateScoreConfigRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ScoreConfig]: + """ + Create a score configuration (config). Score configs are used to define the structure of scores + + Parameters + ---------- + request : CreateScoreConfigRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ScoreConfig] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/score-configs", + method="POST", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=CreateScoreConfigRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScoreConfig, + parse_obj_as( + type_=ScoreConfig, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: 
ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ScoreConfigs]: + """ + Get all score configs + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1. + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ScoreConfigs] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/score-configs", + method="GET", + params={ + "page": page, + "limit": limit, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScoreConfigs, + parse_obj_as( + type_=ScoreConfigs, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_by_id( + self, config_id: str, *, request_options: 
typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ScoreConfig]: + """ + Get a score config + + Parameters + ---------- + config_id : str + The unique langfuse identifier of a score config + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ScoreConfig] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/score-configs/{jsonable_encoder(config_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScoreConfig, + parse_obj_as( + type_=ScoreConfig, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + 
headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def update( + self, + config_id: str, + *, + request: UpdateScoreConfigRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ScoreConfig]: + """ + Update a score config + + Parameters + ---------- + config_id : str + The unique langfuse identifier of a score config + + request : UpdateScoreConfigRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ScoreConfig] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/score-configs/{jsonable_encoder(config_id)}", + method="PATCH", + json=convert_and_respect_annotation_metadata( + object_=request, annotation=UpdateScoreConfigRequest, direction="write" + ), + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ScoreConfig, + parse_obj_as( + type_=ScoreConfig, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + 
body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/score_configs/types/__init__.py b/langfuse/api/resources/score_configs/types/__init__.py index 1c328b614..10ef4f679 100644 --- a/langfuse/api/resources/score_configs/types/__init__.py +++ b/langfuse/api/resources/score_configs/types/__init__.py @@ -1,7 +1,46 @@ # This file was auto-generated by Fern from our API Definition. 
-from .create_score_config_request import CreateScoreConfigRequest -from .score_configs import ScoreConfigs -from .update_score_config_request import UpdateScoreConfigRequest +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .create_score_config_request import CreateScoreConfigRequest + from .score_configs import ScoreConfigs + from .update_score_config_request import UpdateScoreConfigRequest +_dynamic_imports: typing.Dict[str, str] = { + "CreateScoreConfigRequest": ".create_score_config_request", + "ScoreConfigs": ".score_configs", + "UpdateScoreConfigRequest": ".update_score_config_request", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["CreateScoreConfigRequest", "ScoreConfigs", "UpdateScoreConfigRequest"] diff --git a/langfuse/api/resources/score_configs/types/create_score_config_request.py b/langfuse/api/resources/score_configs/types/create_score_config_request.py index e136af157..37d5015e0 100644 --- a/langfuse/api/resources/score_configs/types/create_score_config_request.py +++ b/langfuse/api/resources/score_configs/types/create_score_config_request.py @@ -1,72 +1,53 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from ...commons.types.config_category import ConfigCategory from ...commons.types.score_data_type import ScoreDataType -class CreateScoreConfigRequest(pydantic_v1.BaseModel): +class CreateScoreConfigRequest(UniversalBaseModel): name: str - data_type: ScoreDataType = pydantic_v1.Field(alias="dataType") - categories: typing.Optional[typing.List[ConfigCategory]] = pydantic_v1.Field( + data_type: typing_extensions.Annotated[ + ScoreDataType, FieldMetadata(alias="dataType") + ] + categories: typing.Optional[typing.List[ConfigCategory]] = pydantic.Field( default=None ) """ Configure custom categories for categorical scores. Pass a list of objects with `label` and `value` properties. Categories are autogenerated for boolean configs and cannot be passed """ - min_value: typing.Optional[float] = pydantic_v1.Field( - alias="minValue", default=None - ) + min_value: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="minValue") + ] = pydantic.Field(default=None) """ Configure a minimum value for numerical scores. If not set, the minimum value defaults to -∞ """ - max_value: typing.Optional[float] = pydantic_v1.Field( - alias="maxValue", default=None - ) + max_value: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="maxValue") + ] = pydantic.Field(default=None) """ Configure a maximum value for numerical scores. If not set, the maximum value defaults to +∞ """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Description is shown across the Langfuse UI and can be used to e.g. 
explain the config categories in detail, why a numeric range was set, or provide additional context on config name or usage """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/score_configs/types/score_configs.py b/langfuse/api/resources/score_configs/types/score_configs.py index fc84e28a3..09617f5d2 100644 --- a/langfuse/api/resources/score_configs/types/score_configs.py +++ b/langfuse/api/resources/score_configs/types/score_configs.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.score_config import ScoreConfig from ...utils.resources.pagination.types.meta_response import MetaResponse -class ScoreConfigs(pydantic_v1.BaseModel): +class ScoreConfigs(UniversalBaseModel): data: typing.List[ScoreConfig] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/score_configs/types/update_score_config_request.py b/langfuse/api/resources/score_configs/types/update_score_config_request.py index ce5f980b8..b12b6305e 100644 --- a/langfuse/api/resources/score_configs/types/update_score_config_request.py +++ b/langfuse/api/resources/score_configs/types/update_score_config_request.py @@ -1,81 +1,60 @@ # This file was auto-generated by Fern from our API 
Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from ...commons.types.config_category import ConfigCategory -class UpdateScoreConfigRequest(pydantic_v1.BaseModel): - is_archived: typing.Optional[bool] = pydantic_v1.Field( - alias="isArchived", default=None - ) +class UpdateScoreConfigRequest(UniversalBaseModel): + is_archived: typing_extensions.Annotated[ + typing.Optional[bool], FieldMetadata(alias="isArchived") + ] = pydantic.Field(default=None) """ The status of the score config showing if it is archived or not """ - name: typing.Optional[str] = pydantic_v1.Field(default=None) + name: typing.Optional[str] = pydantic.Field(default=None) """ The name of the score config """ - categories: typing.Optional[typing.List[ConfigCategory]] = pydantic_v1.Field( + categories: typing.Optional[typing.List[ConfigCategory]] = pydantic.Field( default=None ) """ Configure custom categories for categorical scores. Pass a list of objects with `label` and `value` properties. Categories are autogenerated for boolean configs and cannot be passed """ - min_value: typing.Optional[float] = pydantic_v1.Field( - alias="minValue", default=None - ) + min_value: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="minValue") + ] = pydantic.Field(default=None) """ Configure a minimum value for numerical scores. If not set, the minimum value defaults to -∞ """ - max_value: typing.Optional[float] = pydantic_v1.Field( - alias="maxValue", default=None - ) + max_value: typing_extensions.Annotated[ + typing.Optional[float], FieldMetadata(alias="maxValue") + ] = pydantic.Field(default=None) """ Configure a maximum value for numerical scores. 
If not set, the maximum value defaults to +∞ """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Description is shown across the Langfuse UI and can be used to e.g. explain the config categories in detail, why a numeric range was set, or provide additional context on config name or usage """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/score_v_2/__init__.py b/langfuse/api/resources/score_v_2/__init__.py index 40599eec1..2d0fcec36 100644 --- a/langfuse/api/resources/score_v_2/__init__.py +++ b/langfuse/api/resources/score_v_2/__init__.py @@ -1,16 +1,61 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import ( - GetScoresResponse, - GetScoresResponseData, - GetScoresResponseDataBoolean, - GetScoresResponseDataCategorical, - GetScoresResponseDataNumeric, - GetScoresResponseData_Boolean, - GetScoresResponseData_Categorical, - GetScoresResponseData_Numeric, - GetScoresResponseTraceData, -) +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ( + GetScoresResponse, + GetScoresResponseData, + GetScoresResponseDataBoolean, + GetScoresResponseDataCategorical, + GetScoresResponseDataNumeric, + GetScoresResponseData_Boolean, + GetScoresResponseData_Categorical, + GetScoresResponseData_Numeric, + GetScoresResponseTraceData, + ) +_dynamic_imports: typing.Dict[str, str] = { + "GetScoresResponse": ".types", + "GetScoresResponseData": ".types", + "GetScoresResponseDataBoolean": ".types", + "GetScoresResponseDataCategorical": ".types", + "GetScoresResponseDataNumeric": ".types", + "GetScoresResponseData_Boolean": ".types", + "GetScoresResponseData_Categorical": ".types", + "GetScoresResponseData_Numeric": ".types", + "GetScoresResponseTraceData": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "GetScoresResponse", diff --git a/langfuse/api/resources/score_v_2/client.py b/langfuse/api/resources/score_v_2/client.py 
index e927b6c2b..ea9ce436d 100644 --- a/langfuse/api/resources/score_v_2/client.py +++ b/langfuse/api/resources/score_v_2/client.py @@ -2,28 +2,30 @@ import datetime as dt import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.datetime_utils import serialize_datetime -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..commons.types.score import Score from ..commons.types.score_data_type import ScoreDataType from ..commons.types.score_source import ScoreSource +from .raw_client import AsyncRawScoreV2Client, RawScoreV2Client from .types.get_scores_response import GetScoresResponse class ScoreV2Client: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawScoreV2Client(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawScoreV2Client: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawScoreV2Client + """ + return self._raw_client def get( self, @@ -108,7 +110,7 @@ def get( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -120,58 +122,26 @@ def get( ) client.score_v_2.get() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/v2/scores", - method="GET", - params={ - "page": page, - "limit": limit, - "userId": user_id, - "name": name, - "fromTimestamp": serialize_datetime(from_timestamp) - if from_timestamp is not None - else None, - "toTimestamp": serialize_datetime(to_timestamp) - if to_timestamp is not None - else None, - "environment": environment, - "source": source, - "operator": operator, - "value": value, - "scoreIds": score_ids, - "configId": config_id, - "sessionId": session_id, - "queueId": queue_id, - "dataType": data_type, - "traceTags": trace_tags, - }, + _response = self._raw_client.get( + page=page, + limit=limit, + user_id=user_id, + name=name, + from_timestamp=from_timestamp, + to_timestamp=to_timestamp, + environment=environment, + source=source, + operator=operator, + value=value, + score_ids=score_ids, + config_id=config_id, + session_id=session_id, + queue_id=queue_id, + data_type=data_type, + trace_tags=trace_tags, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetScoresResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_by_id( self, score_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -193,7 +163,7 @@ def get_by_id( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -207,41 +177,26 @@ def get_by_id( score_id="scoreId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/v2/scores/{jsonable_encoder(score_id)}", - method="GET", - request_options=request_options, + _response = self._raw_client.get_by_id( + score_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Score, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncScoreV2Client: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawScoreV2Client(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawScoreV2Client: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawScoreV2Client + """ + return self._raw_client async def get( self, @@ -328,7 +283,7 @@ async def get( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -346,58 +301,26 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/v2/scores", - method="GET", - params={ - "page": page, - "limit": limit, - "userId": user_id, - "name": name, - "fromTimestamp": serialize_datetime(from_timestamp) - if from_timestamp is not None - else None, - "toTimestamp": serialize_datetime(to_timestamp) - if to_timestamp is not None - else None, - "environment": environment, - "source": source, - "operator": operator, - "value": value, - "scoreIds": score_ids, - "configId": config_id, - "sessionId": session_id, - "queueId": queue_id, - "dataType": data_type, - "traceTags": trace_tags, - }, + _response = await self._raw_client.get( + page=page, + limit=limit, + user_id=user_id, + name=name, + from_timestamp=from_timestamp, + to_timestamp=to_timestamp, + environment=environment, + source=source, + operator=operator, + value=value, + score_ids=score_ids, + config_id=config_id, + session_id=session_id, + queue_id=queue_id, + data_type=data_type, + trace_tags=trace_tags, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetScoresResponse, 
_response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_by_id( self, score_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -421,7 +344,7 @@ async def get_by_id( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -441,33 +364,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/v2/scores/{jsonable_encoder(score_id)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get_by_id( + score_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Score, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if 
_response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/score_v_2/raw_client.py b/langfuse/api/resources/score_v_2/raw_client.py new file mode 100644 index 000000000..c613fff79 --- /dev/null +++ b/langfuse/api/resources/score_v_2/raw_client.py @@ -0,0 +1,606 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.datetime_utils import serialize_datetime +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..commons.types.score import Score +from ..commons.types.score_data_type import ScoreDataType +from ..commons.types.score_source import ScoreSource +from .types.get_scores_response import GetScoresResponse + + +class RawScoreV2Client: + def 
__init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + user_id: typing.Optional[str] = None, + name: typing.Optional[str] = None, + from_timestamp: typing.Optional[dt.datetime] = None, + to_timestamp: typing.Optional[dt.datetime] = None, + environment: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + source: typing.Optional[ScoreSource] = None, + operator: typing.Optional[str] = None, + value: typing.Optional[float] = None, + score_ids: typing.Optional[str] = None, + config_id: typing.Optional[str] = None, + session_id: typing.Optional[str] = None, + queue_id: typing.Optional[str] = None, + data_type: typing.Optional[ScoreDataType] = None, + trace_tags: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GetScoresResponse]: + """ + Get a list of scores (supports both trace and session scores) + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1. + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. + + user_id : typing.Optional[str] + Retrieve only scores with this userId associated to the trace. + + name : typing.Optional[str] + Retrieve only scores with this name. + + from_timestamp : typing.Optional[dt.datetime] + Optional filter to only include scores created on or after a certain datetime (ISO 8601) + + to_timestamp : typing.Optional[dt.datetime] + Optional filter to only include scores created before a certain datetime (ISO 8601) + + environment : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Optional filter for scores where the environment is one of the provided values. + + source : typing.Optional[ScoreSource] + Retrieve only scores from a specific source. 
+ + operator : typing.Optional[str] + Retrieve only scores with value. + + value : typing.Optional[float] + Retrieve only scores with value. + + score_ids : typing.Optional[str] + Comma-separated list of score IDs to limit the results to. + + config_id : typing.Optional[str] + Retrieve only scores with a specific configId. + + session_id : typing.Optional[str] + Retrieve only scores with a specific sessionId. + + queue_id : typing.Optional[str] + Retrieve only scores with a specific annotation queueId. + + data_type : typing.Optional[ScoreDataType] + Retrieve only scores with a specific dataType. + + trace_tags : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Only scores linked to traces that include all of these tags will be returned. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GetScoresResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/v2/scores", + method="GET", + params={ + "page": page, + "limit": limit, + "userId": user_id, + "name": name, + "fromTimestamp": serialize_datetime(from_timestamp) + if from_timestamp is not None + else None, + "toTimestamp": serialize_datetime(to_timestamp) + if to_timestamp is not None + else None, + "environment": environment, + "source": source, + "operator": operator, + "value": value, + "scoreIds": score_ids, + "configId": config_id, + "sessionId": session_id, + "queueId": queue_id, + "dataType": data_type, + "traceTags": trace_tags, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GetScoresResponse, + parse_obj_as( + type_=GetScoresResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + 
object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get_by_id( + self, score_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[Score]: + """ + Get a score (supports both trace and session scores) + + Parameters + ---------- + score_id : str + The unique langfuse identifier of a score + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Score] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/v2/scores/{jsonable_encoder(score_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Score, + parse_obj_as( + type_=Score, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawScoreV2Client: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + 
self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + user_id: typing.Optional[str] = None, + name: typing.Optional[str] = None, + from_timestamp: typing.Optional[dt.datetime] = None, + to_timestamp: typing.Optional[dt.datetime] = None, + environment: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + source: typing.Optional[ScoreSource] = None, + operator: typing.Optional[str] = None, + value: typing.Optional[float] = None, + score_ids: typing.Optional[str] = None, + config_id: typing.Optional[str] = None, + session_id: typing.Optional[str] = None, + queue_id: typing.Optional[str] = None, + data_type: typing.Optional[ScoreDataType] = None, + trace_tags: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GetScoresResponse]: + """ + Get a list of scores (supports both trace and session scores) + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1. + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. + + user_id : typing.Optional[str] + Retrieve only scores with this userId associated to the trace. + + name : typing.Optional[str] + Retrieve only scores with this name. + + from_timestamp : typing.Optional[dt.datetime] + Optional filter to only include scores created on or after a certain datetime (ISO 8601) + + to_timestamp : typing.Optional[dt.datetime] + Optional filter to only include scores created before a certain datetime (ISO 8601) + + environment : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Optional filter for scores where the environment is one of the provided values. + + source : typing.Optional[ScoreSource] + Retrieve only scores from a specific source. + + operator : typing.Optional[str] + Retrieve only scores with value. 
+ + value : typing.Optional[float] + Retrieve only scores with value. + + score_ids : typing.Optional[str] + Comma-separated list of score IDs to limit the results to. + + config_id : typing.Optional[str] + Retrieve only scores with a specific configId. + + session_id : typing.Optional[str] + Retrieve only scores with a specific sessionId. + + queue_id : typing.Optional[str] + Retrieve only scores with a specific annotation queueId. + + data_type : typing.Optional[ScoreDataType] + Retrieve only scores with a specific dataType. + + trace_tags : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Only scores linked to traces that include all of these tags will be returned. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GetScoresResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/v2/scores", + method="GET", + params={ + "page": page, + "limit": limit, + "userId": user_id, + "name": name, + "fromTimestamp": serialize_datetime(from_timestamp) + if from_timestamp is not None + else None, + "toTimestamp": serialize_datetime(to_timestamp) + if to_timestamp is not None + else None, + "environment": environment, + "source": source, + "operator": operator, + "value": value, + "scoreIds": score_ids, + "configId": config_id, + "sessionId": session_id, + "queueId": queue_id, + "dataType": data_type, + "traceTags": trace_tags, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GetScoresResponse, + parse_obj_as( + type_=GetScoresResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 
401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get_by_id( + self, score_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Score]: + """ + Get a score (supports both trace and session scores) + + Parameters + ---------- + score_id : str + The unique langfuse identifier of a score + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Score] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/v2/scores/{jsonable_encoder(score_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Score, + parse_obj_as( + type_=Score, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/score_v_2/types/__init__.py b/langfuse/api/resources/score_v_2/types/__init__.py index 480ed3406..d54db47b8 
100644 --- a/langfuse/api/resources/score_v_2/types/__init__.py +++ b/langfuse/api/resources/score_v_2/types/__init__.py @@ -1,16 +1,61 @@ # This file was auto-generated by Fern from our API Definition. -from .get_scores_response import GetScoresResponse -from .get_scores_response_data import ( - GetScoresResponseData, - GetScoresResponseData_Boolean, - GetScoresResponseData_Categorical, - GetScoresResponseData_Numeric, -) -from .get_scores_response_data_boolean import GetScoresResponseDataBoolean -from .get_scores_response_data_categorical import GetScoresResponseDataCategorical -from .get_scores_response_data_numeric import GetScoresResponseDataNumeric -from .get_scores_response_trace_data import GetScoresResponseTraceData +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .get_scores_response import GetScoresResponse + from .get_scores_response_data import ( + GetScoresResponseData, + GetScoresResponseData_Boolean, + GetScoresResponseData_Categorical, + GetScoresResponseData_Numeric, + ) + from .get_scores_response_data_boolean import GetScoresResponseDataBoolean + from .get_scores_response_data_categorical import GetScoresResponseDataCategorical + from .get_scores_response_data_numeric import GetScoresResponseDataNumeric + from .get_scores_response_trace_data import GetScoresResponseTraceData +_dynamic_imports: typing.Dict[str, str] = { + "GetScoresResponse": ".get_scores_response", + "GetScoresResponseData": ".get_scores_response_data", + "GetScoresResponseDataBoolean": ".get_scores_response_data_boolean", + "GetScoresResponseDataCategorical": ".get_scores_response_data_categorical", + "GetScoresResponseDataNumeric": ".get_scores_response_data_numeric", + "GetScoresResponseData_Boolean": ".get_scores_response_data", + "GetScoresResponseData_Categorical": ".get_scores_response_data", + "GetScoresResponseData_Numeric": ".get_scores_response_data", + "GetScoresResponseTraceData": 
".get_scores_response_trace_data", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = [ "GetScoresResponse", diff --git a/langfuse/api/resources/score_v_2/types/get_scores_response.py b/langfuse/api/resources/score_v_2/types/get_scores_response.py index 777bb799b..72fa7060a 100644 --- a/langfuse/api/resources/score_v_2/types/get_scores_response.py +++ b/langfuse/api/resources/score_v_2/types/get_scores_response.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...utils.resources.pagination.types.meta_response import MetaResponse from .get_scores_response_data import GetScoresResponseData -class GetScoresResponse(pydantic_v1.BaseModel): +class GetScoresResponse(UniversalBaseModel): data: typing.List[GetScoresResponseData] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/score_v_2/types/get_scores_response_data.py b/langfuse/api/resources/score_v_2/types/get_scores_response_data.py index e09f31cb9..2604120d8 100644 --- a/langfuse/api/resources/score_v_2/types/get_scores_response_data.py +++ b/langfuse/api/resources/score_v_2/types/get_scores_response_data.py @@ -5,211 +5,180 @@ import datetime as dt import typing -from 
....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata from ...commons.types.score_source import ScoreSource from .get_scores_response_trace_data import GetScoresResponseTraceData -class GetScoresResponseData_Numeric(pydantic_v1.BaseModel): +class GetScoresResponseData_Numeric(UniversalBaseModel): + data_type: typing_extensions.Annotated[ + typing.Literal["NUMERIC"], FieldMetadata(alias="dataType") + ] = "NUMERIC" trace: typing.Optional[GetScoresResponseTraceData] = None value: float id: str - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) - session_id: typing.Optional[str] = pydantic_v1.Field( - alias="sessionId", default=None - ) - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - dataset_run_id: typing.Optional[str] = pydantic_v1.Field( - alias="datasetRunId", default=None - ) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = None + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + dataset_run_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="datasetRunId") + ] = None name: str source: ScoreSource timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ 
+ dt.datetime, FieldMetadata(alias="updatedAt") + ] + author_user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="authorUserId") + ] = None comment: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) + config_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="configId") + ] = None + queue_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="queueId") + ] = None environment: typing.Optional[str] = None - data_type: typing.Literal["NUMERIC"] = pydantic_v1.Field( - alias="dataType", default="NUMERIC" - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class GetScoresResponseData_Categorical(pydantic_v1.BaseModel): + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class 
GetScoresResponseData_Categorical(UniversalBaseModel): + data_type: typing_extensions.Annotated[ + typing.Literal["CATEGORICAL"], FieldMetadata(alias="dataType") + ] = "CATEGORICAL" trace: typing.Optional[GetScoresResponseTraceData] = None value: float - string_value: str = pydantic_v1.Field(alias="stringValue") + string_value: typing_extensions.Annotated[str, FieldMetadata(alias="stringValue")] id: str - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) - session_id: typing.Optional[str] = pydantic_v1.Field( - alias="sessionId", default=None - ) - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - dataset_run_id: typing.Optional[str] = pydantic_v1.Field( - alias="datasetRunId", default=None - ) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = None + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + dataset_run_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="datasetRunId") + ] = None name: str source: ScoreSource timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + author_user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="authorUserId") + ] = None comment: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", 
default=None) - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) + config_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="configId") + ] = None + queue_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="queueId") + ] = None environment: typing.Optional[str] = None - data_type: typing.Literal["CATEGORICAL"] = pydantic_v1.Field( - alias="dataType", default="CATEGORICAL" - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class GetScoresResponseData_Boolean(pydantic_v1.BaseModel): + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class GetScoresResponseData_Boolean(UniversalBaseModel): + data_type: typing_extensions.Annotated[ + typing.Literal["BOOLEAN"], FieldMetadata(alias="dataType") + ] = "BOOLEAN" trace: typing.Optional[GetScoresResponseTraceData] = None value: float - string_value: str = pydantic_v1.Field(alias="stringValue") + string_value: typing_extensions.Annotated[str, 
FieldMetadata(alias="stringValue")] id: str - trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) - session_id: typing.Optional[str] = pydantic_v1.Field( - alias="sessionId", default=None - ) - observation_id: typing.Optional[str] = pydantic_v1.Field( - alias="observationId", default=None - ) - dataset_run_id: typing.Optional[str] = pydantic_v1.Field( - alias="datasetRunId", default=None - ) + trace_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="traceId") + ] = None + session_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="sessionId") + ] = None + observation_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="observationId") + ] = None + dataset_run_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="datasetRunId") + ] = None name: str source: ScoreSource timestamp: dt.datetime - created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") - updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") - author_user_id: typing.Optional[str] = pydantic_v1.Field( - alias="authorUserId", default=None - ) + created_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="createdAt") + ] + updated_at: typing_extensions.Annotated[ + dt.datetime, FieldMetadata(alias="updatedAt") + ] + author_user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="authorUserId") + ] = None comment: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None - config_id: typing.Optional[str] = pydantic_v1.Field(alias="configId", default=None) - queue_id: typing.Optional[str] = pydantic_v1.Field(alias="queueId", default=None) + config_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="configId") + ] = None + queue_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="queueId") + ] = None environment: typing.Optional[str] = None - data_type: 
typing.Literal["BOOLEAN"] = pydantic_v1.Field( - alias="dataType", default="BOOLEAN" - ) - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -GetScoresResponseData = typing.Union[ - GetScoresResponseData_Numeric, - GetScoresResponseData_Categorical, - GetScoresResponseData_Boolean, + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +GetScoresResponseData = typing_extensions.Annotated[ + typing.Union[ + GetScoresResponseData_Numeric, + GetScoresResponseData_Categorical, + GetScoresResponseData_Boolean, + ], + pydantic.Field(discriminator="dataType"), ] diff --git a/langfuse/api/resources/score_v_2/types/get_scores_response_data_boolean.py b/langfuse/api/resources/score_v_2/types/get_scores_response_data_boolean.py index 48012990c..91803b521 100644 --- a/langfuse/api/resources/score_v_2/types/get_scores_response_data_boolean.py +++ b/langfuse/api/resources/score_v_2/types/get_scores_response_data_boolean.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API 
Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from ...commons.types.boolean_score import BooleanScore from .get_scores_response_trace_data import GetScoresResponseTraceData @@ -12,35 +11,13 @@ class GetScoresResponseDataBoolean(BooleanScore): trace: typing.Optional[GetScoresResponseTraceData] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/score_v_2/types/get_scores_response_data_categorical.py b/langfuse/api/resources/score_v_2/types/get_scores_response_data_categorical.py index 6e27f6d64..87cc4f6b7 100644 --- a/langfuse/api/resources/score_v_2/types/get_scores_response_data_categorical.py +++ b/langfuse/api/resources/score_v_2/types/get_scores_response_data_categorical.py @@ -1,10 
+1,9 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from ...commons.types.categorical_score import CategoricalScore from .get_scores_response_trace_data import GetScoresResponseTraceData @@ -12,35 +11,13 @@ class GetScoresResponseDataCategorical(CategoricalScore): trace: typing.Optional[GetScoresResponseTraceData] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/score_v_2/types/get_scores_response_data_numeric.py b/langfuse/api/resources/score_v_2/types/get_scores_response_data_numeric.py index f7342833f..040c467b9 100644 --- a/langfuse/api/resources/score_v_2/types/get_scores_response_data_numeric.py +++ 
b/langfuse/api/resources/score_v_2/types/get_scores_response_data_numeric.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 from ...commons.types.numeric_score import NumericScore from .get_scores_response_trace_data import GetScoresResponseTraceData @@ -12,35 +11,13 @@ class GetScoresResponseDataNumeric(NumericScore): trace: typing.Optional[GetScoresResponseTraceData] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/score_v_2/types/get_scores_response_trace_data.py b/langfuse/api/resources/score_v_2/types/get_scores_response_trace_data.py index 6e5539e35..d9ffde6db 100644 --- 
a/langfuse/api/resources/score_v_2/types/get_scores_response_trace_data.py +++ b/langfuse/api/resources/score_v_2/types/get_scores_response_trace_data.py @@ -1,57 +1,38 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.serialization import FieldMetadata -class GetScoresResponseTraceData(pydantic_v1.BaseModel): - user_id: typing.Optional[str] = pydantic_v1.Field(alias="userId", default=None) +class GetScoresResponseTraceData(UniversalBaseModel): + user_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="userId") + ] = pydantic.Field(default=None) """ The user ID associated with the trace referenced by score """ - tags: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) + tags: typing.Optional[typing.List[str]] = pydantic.Field(default=None) """ A list of tags associated with the trace referenced by score """ - environment: typing.Optional[str] = pydantic_v1.Field(default=None) + environment: typing.Optional[str] = pydantic.Field(default=None) """ The environment of the trace referenced by score """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - 
super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/sessions/__init__.py b/langfuse/api/resources/sessions/__init__.py index 048704297..89b7e63bc 100644 --- a/langfuse/api/resources/sessions/__init__.py +++ b/langfuse/api/resources/sessions/__init__.py @@ -1,5 +1,40 @@ # This file was auto-generated by Fern from our API Definition. -from .types import PaginatedSessions +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import PaginatedSessions +_dynamic_imports: typing.Dict[str, str] = {"PaginatedSessions": ".types"} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["PaginatedSessions"] diff --git a/langfuse/api/resources/sessions/client.py b/langfuse/api/resources/sessions/client.py index d5ae779c3..f6f9ac0af 100644 --- 
a/langfuse/api/resources/sessions/client.py +++ b/langfuse/api/resources/sessions/client.py @@ -2,26 +2,28 @@ import datetime as dt import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.datetime_utils import serialize_datetime -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from ..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..commons.types.session_with_traces import SessionWithTraces +from .raw_client import AsyncRawSessionsClient, RawSessionsClient from .types.paginated_sessions import PaginatedSessions class SessionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawSessionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawSessionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawSessionsClient + """ + return self._raw_client def list( self, @@ -62,7 +64,7 @@ def list( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -74,47 +76,15 @@ def list( ) client.sessions.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/sessions", - method="GET", - params={ - "page": page, - "limit": limit, - "fromTimestamp": serialize_datetime(from_timestamp) - if from_timestamp is not None - else None, - "toTimestamp": serialize_datetime(to_timestamp) - if to_timestamp is not None - else None, - "environment": environment, - }, + _response = self._raw_client.list( + page=page, + limit=limit, + from_timestamp=from_timestamp, + to_timestamp=to_timestamp, + environment=environment, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSessions, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get( self, @@ -139,7 +109,7 @@ def get( Examples 
-------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -153,41 +123,24 @@ def get( session_id="sessionId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/sessions/{jsonable_encoder(session_id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(SessionWithTraces, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(session_id, request_options=request_options) + return _response.data class AsyncSessionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawSessionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawSessionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawSessionsClient + """ + return self._raw_client async def list( self, @@ -230,7 +183,7 @@ async def list( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -248,47 +201,15 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/sessions", - method="GET", - params={ - "page": page, - "limit": limit, - "fromTimestamp": serialize_datetime(from_timestamp) - if from_timestamp is not None - else None, - "toTimestamp": serialize_datetime(to_timestamp) - if to_timestamp is not None - else None, - "environment": environment, - }, + _response = await self._raw_client.list( + page=page, + limit=limit, + from_timestamp=from_timestamp, + to_timestamp=to_timestamp, + environment=environment, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSessions, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return 
_response.data async def get( self, @@ -315,7 +236,7 @@ async def get( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -335,33 +256,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/sessions/{jsonable_encoder(session_id)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get( + session_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(SessionWithTraces, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/sessions/raw_client.py b/langfuse/api/resources/sessions/raw_client.py new file mode 100644 index 000000000..0eec08a91 --- /dev/null +++ b/langfuse/api/resources/sessions/raw_client.py @@ -0,0 +1,500 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.datetime_utils import serialize_datetime +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..commons.types.session_with_traces import SessionWithTraces +from .types.paginated_sessions import PaginatedSessions + + +class RawSessionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + from_timestamp: typing.Optional[dt.datetime] = None, + to_timestamp: typing.Optional[dt.datetime] = None, + environment: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PaginatedSessions]: + """ + Get sessions + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1 + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. 
+ + from_timestamp : typing.Optional[dt.datetime] + Optional filter to only include sessions created on or after a certain datetime (ISO 8601) + + to_timestamp : typing.Optional[dt.datetime] + Optional filter to only include sessions created before a certain datetime (ISO 8601) + + environment : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Optional filter for sessions where the environment is one of the provided values. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PaginatedSessions] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/sessions", + method="GET", + params={ + "page": page, + "limit": limit, + "fromTimestamp": serialize_datetime(from_timestamp) + if from_timestamp is not None + else None, + "toTimestamp": serialize_datetime(to_timestamp) + if to_timestamp is not None + else None, + "environment": environment, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedSessions, + parse_obj_as( + type_=PaginatedSessions, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), 
+ body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def get( + self, + session_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[SessionWithTraces]: + """ + Get a session. Please note that `traces` on this endpoint are not paginated, if you plan to fetch large sessions, consider `GET /api/public/traces?sessionId=` + + Parameters + ---------- + session_id : str + The unique id of a session + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[SessionWithTraces] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/sessions/{jsonable_encoder(session_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + SessionWithTraces, + parse_obj_as( + type_=SessionWithTraces, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawSessionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = 
client_wrapper + + async def list( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + from_timestamp: typing.Optional[dt.datetime] = None, + to_timestamp: typing.Optional[dt.datetime] = None, + environment: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PaginatedSessions]: + """ + Get sessions + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1 + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. + + from_timestamp : typing.Optional[dt.datetime] + Optional filter to only include sessions created on or after a certain datetime (ISO 8601) + + to_timestamp : typing.Optional[dt.datetime] + Optional filter to only include sessions created before a certain datetime (ISO 8601) + + environment : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Optional filter for sessions where the environment is one of the provided values. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[PaginatedSessions] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/sessions", + method="GET", + params={ + "page": page, + "limit": limit, + "fromTimestamp": serialize_datetime(from_timestamp) + if from_timestamp is not None + else None, + "toTimestamp": serialize_datetime(to_timestamp) + if to_timestamp is not None + else None, + "environment": environment, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PaginatedSessions, + parse_obj_as( + type_=PaginatedSessions, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + 
body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def get( + self, + session_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[SessionWithTraces]: + """ + Get a session. Please note that `traces` on this endpoint are not paginated, if you plan to fetch large sessions, consider `GET /api/public/traces?sessionId=` + + Parameters + ---------- + session_id : str + The unique id of a session + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[SessionWithTraces] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/sessions/{jsonable_encoder(session_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + SessionWithTraces, + parse_obj_as( + type_=SessionWithTraces, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), 
+ ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/sessions/types/__init__.py b/langfuse/api/resources/sessions/types/__init__.py index 42d63b428..45f21e139 100644 --- a/langfuse/api/resources/sessions/types/__init__.py +++ b/langfuse/api/resources/sessions/types/__init__.py @@ -1,5 +1,40 @@ # This file was auto-generated by Fern from our API Definition. -from .paginated_sessions import PaginatedSessions +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .paginated_sessions import PaginatedSessions +_dynamic_imports: typing.Dict[str, str] = {"PaginatedSessions": ".paginated_sessions"} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["PaginatedSessions"] diff --git a/langfuse/api/resources/sessions/types/paginated_sessions.py 
b/langfuse/api/resources/sessions/types/paginated_sessions.py index 5dd9fb497..a2d1bc151 100644 --- a/langfuse/api/resources/sessions/types/paginated_sessions.py +++ b/langfuse/api/resources/sessions/types/paginated_sessions.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.session import Session from ...utils.resources.pagination.types.meta_response import MetaResponse -class PaginatedSessions(pydantic_v1.BaseModel): +class PaginatedSessions(UniversalBaseModel): data: typing.List[Session] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/trace/__init__.py b/langfuse/api/resources/trace/__init__.py index 17855e971..fc9889dfa 100644 
--- a/langfuse/api/resources/trace/__init__.py +++ b/langfuse/api/resources/trace/__init__.py @@ -1,5 +1,44 @@ # This file was auto-generated by Fern from our API Definition. -from .types import DeleteTraceResponse, Sort, Traces +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import DeleteTraceResponse, Sort, Traces +_dynamic_imports: typing.Dict[str, str] = { + "DeleteTraceResponse": ".types", + "Sort": ".types", + "Traces": ".types", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["DeleteTraceResponse", "Sort", "Traces"] diff --git a/langfuse/api/resources/trace/client.py b/langfuse/api/resources/trace/client.py index e1f837f50..7746b4fb8 100644 --- a/langfuse/api/resources/trace/client.py +++ b/langfuse/api/resources/trace/client.py @@ -2,20 +2,11 @@ import datetime as dt import typing -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.datetime_utils import serialize_datetime -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions -from ..commons.errors.access_denied_error import AccessDeniedError -from 
..commons.errors.error import Error -from ..commons.errors.method_not_allowed_error import MethodNotAllowedError -from ..commons.errors.not_found_error import NotFoundError -from ..commons.errors.unauthorized_error import UnauthorizedError from ..commons.types.trace_with_full_details import TraceWithFullDetails +from .raw_client import AsyncRawTraceClient, RawTraceClient from .types.delete_trace_response import DeleteTraceResponse from .types.traces import Traces @@ -25,7 +16,18 @@ class TraceClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawTraceClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawTraceClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawTraceClient + """ + return self._raw_client def get( self, trace_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -47,7 +49,7 @@ def get( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -61,36 +63,8 @@ def get( trace_id="traceId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/traces/{jsonable_encoder(trace_id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TraceWithFullDetails, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(trace_id, request_options=request_options) + return _response.data def delete( self, trace_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -112,7 +86,7 @@ def delete( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -126,36 +100,8 @@ def delete( trace_id="traceId", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/public/traces/{jsonable_encoder(trace_id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DeleteTraceResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(trace_id, request_options=request_options) + return _response.data def list( self, @@ -333,7 +279,7 @@ def list( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -345,56 +291,24 @@ def list( ) client.trace.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/public/traces", - method="GET", - params={ - "page": page, - "limit": limit, - "userId": user_id, - "name": name, - "sessionId": session_id, - "fromTimestamp": serialize_datetime(from_timestamp) - if from_timestamp is not None - else None, - "toTimestamp": serialize_datetime(to_timestamp) - if to_timestamp is not None - else None, - "orderBy": order_by, - "tags": tags, - "version": version, - "release": release, - "environment": environment, - "fields": fields, - "filter": filter, - }, + _response = self._raw_client.list( + page=page, + limit=limit, + user_id=user_id, + name=name, + session_id=session_id, + from_timestamp=from_timestamp, + to_timestamp=to_timestamp, + order_by=order_by, + tags=tags, + version=version, + release=release, + environment=environment, + fields=fields, + filter=filter, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Traces, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # 
type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def delete_multiple( self, @@ -419,7 +333,7 @@ def delete_multiple( Examples -------- - from langfuse.client import FernLangfuse + from langfuse import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -433,43 +347,26 @@ def delete_multiple( trace_ids=["traceIds", "traceIds"], ) """ - _response = self._client_wrapper.httpx_client.request( - "api/public/traces", - method="DELETE", - json={"traceIds": trace_ids}, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.delete_multiple( + trace_ids=trace_ids, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DeleteTraceResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) 
+ return _response.data class AsyncTraceClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawTraceClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawTraceClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawTraceClient + """ + return self._raw_client async def get( self, trace_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -493,7 +390,7 @@ async def get( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -513,36 +410,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/traces/{jsonable_encoder(trace_id)}", - method="GET", - request_options=request_options, + _response = await self._raw_client.get( + trace_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TraceWithFullDetails, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete( self, trace_id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -566,7 +437,7 @@ async def delete( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -586,36 +457,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/public/traces/{jsonable_encoder(trace_id)}", - method="DELETE", - request_options=request_options, + _response = await self._raw_client.delete( + trace_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DeleteTraceResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def list( self, @@ -795,7 +640,7 @@ async def list( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from 
langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -813,56 +658,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/traces", - method="GET", - params={ - "page": page, - "limit": limit, - "userId": user_id, - "name": name, - "sessionId": session_id, - "fromTimestamp": serialize_datetime(from_timestamp) - if from_timestamp is not None - else None, - "toTimestamp": serialize_datetime(to_timestamp) - if to_timestamp is not None - else None, - "orderBy": order_by, - "tags": tags, - "version": version, - "release": release, - "environment": environment, - "fields": fields, - "filter": filter, - }, + _response = await self._raw_client.list( + page=page, + limit=limit, + user_id=user_id, + name=name, + session_id=session_id, + from_timestamp=from_timestamp, + to_timestamp=to_timestamp, + order_by=order_by, + tags=tags, + version=version, + release=release, + environment=environment, + fields=fields, + filter=filter, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Traces, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def delete_multiple( self, @@ -889,7 +702,7 @@ async def delete_multiple( -------- import asyncio - from langfuse.client import AsyncFernLangfuse + from langfuse import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -909,35 +722,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/public/traces", - method="DELETE", - json={"traceIds": trace_ids}, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.delete_multiple( + trace_ids=trace_ids, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DeleteTraceResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore - if _response.status_code == 401: - raise UnauthorizedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 403: - raise AccessDeniedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 405: - raise MethodNotAllowedError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - if _response.status_code == 404: - raise NotFoundError( - pydantic_v1.parse_obj_as(typing.Any, _response.json()) - ) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/langfuse/api/resources/trace/raw_client.py b/langfuse/api/resources/trace/raw_client.py new file mode 100644 index 000000000..53af55bfb --- 
/dev/null +++ b/langfuse/api/resources/trace/raw_client.py @@ -0,0 +1,1208 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.datetime_utils import serialize_datetime +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ..commons.errors.access_denied_error import AccessDeniedError +from ..commons.errors.error import Error +from ..commons.errors.method_not_allowed_error import MethodNotAllowedError +from ..commons.errors.not_found_error import NotFoundError +from ..commons.errors.unauthorized_error import UnauthorizedError +from ..commons.types.trace_with_full_details import TraceWithFullDetails +from .types.delete_trace_response import DeleteTraceResponse +from .types.traces import Traces + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawTraceClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get( + self, trace_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[TraceWithFullDetails]: + """ + Get a specific trace + + Parameters + ---------- + trace_id : str + The unique langfuse identifier of a trace + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[TraceWithFullDetails] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/traces/{jsonable_encoder(trace_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + TraceWithFullDetails, + parse_obj_as( + type_=TraceWithFullDetails, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete( + self, trace_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> 
HttpResponse[DeleteTraceResponse]: + """ + Delete a specific trace + + Parameters + ---------- + trace_id : str + The unique langfuse identifier of the trace to delete + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeleteTraceResponse] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/public/traces/{jsonable_encoder(trace_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteTraceResponse, + parse_obj_as( + type_=DeleteTraceResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + 
body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def list( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + user_id: typing.Optional[str] = None, + name: typing.Optional[str] = None, + session_id: typing.Optional[str] = None, + from_timestamp: typing.Optional[dt.datetime] = None, + to_timestamp: typing.Optional[dt.datetime] = None, + order_by: typing.Optional[str] = None, + tags: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + version: typing.Optional[str] = None, + release: typing.Optional[str] = None, + environment: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + fields: typing.Optional[str] = None, + filter: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Traces]: + """ + Get list of traces + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1 + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. + + user_id : typing.Optional[str] + + name : typing.Optional[str] + + session_id : typing.Optional[str] + + from_timestamp : typing.Optional[dt.datetime] + Optional filter to only include traces with a trace.timestamp on or after a certain datetime (ISO 8601) + + to_timestamp : typing.Optional[dt.datetime] + Optional filter to only include traces with a trace.timestamp before a certain datetime (ISO 8601) + + order_by : typing.Optional[str] + Format of the string [field].[asc/desc]. Fields: id, timestamp, name, userId, release, version, public, bookmarked, sessionId. Example: timestamp.asc + + tags : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Only traces that include all of these tags will be returned. + + version : typing.Optional[str] + Optional filter to only include traces with a certain version. 
+ + release : typing.Optional[str] + Optional filter to only include traces with a certain release. + + environment : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Optional filter for traces where the environment is one of the provided values. + + fields : typing.Optional[str] + Comma-separated list of fields to include in the response. Available field groups: 'core' (always included), 'io' (input, output, metadata), 'scores', 'observations', 'metrics'. If not specified, all fields are returned. Example: 'core,scores,metrics'. Note: Excluded 'observations' or 'scores' fields return empty arrays; excluded 'metrics' returns -1 for 'totalCost' and 'latency'. + + filter : typing.Optional[str] + JSON string containing an array of filter conditions. When provided, this takes precedence over query parameter filters (userId, name, sessionId, tags, version, release, environment, fromTimestamp, toTimestamp). + + ## Filter Structure + Each filter condition has the following structure: + ```json + [ + { + "type": string, // Required. One of: "datetime", "string", "number", "stringOptions", "categoryOptions", "arrayOptions", "stringObject", "numberObject", "boolean", "null" + "column": string, // Required. Column to filter on (see available columns below) + "operator": string, // Required. Operator based on type: + // - datetime: ">", "<", ">=", "<=" + // - string: "=", "contains", "does not contain", "starts with", "ends with" + // - stringOptions: "any of", "none of" + // - categoryOptions: "any of", "none of" + // - arrayOptions: "any of", "none of", "all of" + // - number: "=", ">", "<", ">=", "<=" + // - stringObject: "=", "contains", "does not contain", "starts with", "ends with" + // - numberObject: "=", ">", "<", ">=", "<=" + // - boolean: "=", "<>" + // - null: "is null", "is not null" + "value": any, // Required (except for null type). Value to compare against. 
Type depends on filter type + "key": string // Required only for stringObject, numberObject, and categoryOptions types when filtering on nested fields like metadata + } + ] + ``` + + ## Available Columns + + ### Core Trace Fields + - `id` (string) - Trace ID + - `name` (string) - Trace name + - `timestamp` (datetime) - Trace timestamp + - `userId` (string) - User ID + - `sessionId` (string) - Session ID + - `environment` (string) - Environment tag + - `version` (string) - Version tag + - `release` (string) - Release tag + - `tags` (arrayOptions) - Array of tags + - `bookmarked` (boolean) - Bookmark status + + ### Structured Data + - `metadata` (stringObject/numberObject/categoryOptions) - Metadata key-value pairs. Use `key` parameter to filter on specific metadata keys. + + ### Aggregated Metrics (from observations) + These metrics are aggregated from all observations within the trace: + - `latency` (number) - Latency in seconds (time from first observation start to last observation end) + - `inputTokens` (number) - Total input tokens across all observations + - `outputTokens` (number) - Total output tokens across all observations + - `totalTokens` (number) - Total tokens (alias: `tokens`) + - `inputCost` (number) - Total input cost in USD + - `outputCost` (number) - Total output cost in USD + - `totalCost` (number) - Total cost in USD + + ### Observation Level Aggregations + These fields aggregate observation levels within the trace: + - `level` (string) - Highest severity level (ERROR > WARNING > DEFAULT > DEBUG) + - `warningCount` (number) - Count of WARNING level observations + - `errorCount` (number) - Count of ERROR level observations + - `defaultCount` (number) - Count of DEFAULT level observations + - `debugCount` (number) - Count of DEBUG level observations + + ### Scores (requires join with scores table) + - `scores_avg` (number) - Average of numeric scores (alias: `scores`) + - `score_categories` (categoryOptions) - Categorical score values + + ## Filter 
Examples + ```json + [ + { + "type": "datetime", + "column": "timestamp", + "operator": ">=", + "value": "2024-01-01T00:00:00Z" + }, + { + "type": "string", + "column": "userId", + "operator": "=", + "value": "user-123" + }, + { + "type": "number", + "column": "totalCost", + "operator": ">=", + "value": 0.01 + }, + { + "type": "arrayOptions", + "column": "tags", + "operator": "all of", + "value": ["production", "critical"] + }, + { + "type": "stringObject", + "column": "metadata", + "key": "customer_tier", + "operator": "=", + "value": "enterprise" + } + ] + ``` + + ## Performance Notes + - Filtering on `userId`, `sessionId`, or `metadata` may enable skip indexes for better query performance + - Score filters require a join with the scores table and may impact query performance + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Traces] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/traces", + method="GET", + params={ + "page": page, + "limit": limit, + "userId": user_id, + "name": name, + "sessionId": session_id, + "fromTimestamp": serialize_datetime(from_timestamp) + if from_timestamp is not None + else None, + "toTimestamp": serialize_datetime(to_timestamp) + if to_timestamp is not None + else None, + "orderBy": order_by, + "tags": tags, + "version": version, + "release": release, + "environment": environment, + "fields": fields, + "filter": filter, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Traces, + parse_obj_as( + type_=Traces, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise 
UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + def delete_multiple( + self, + *, + trace_ids: typing.Sequence[str], + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeleteTraceResponse]: + """ + Delete multiple traces + + Parameters + ---------- + trace_ids : typing.Sequence[str] + List of trace IDs to delete + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[DeleteTraceResponse] + """ + _response = self._client_wrapper.httpx_client.request( + "api/public/traces", + method="DELETE", + json={ + "traceIds": trace_ids, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteTraceResponse, + parse_obj_as( + type_=DeleteTraceResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + +class AsyncRawTraceClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + 
self._client_wrapper = client_wrapper + + async def get( + self, trace_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[TraceWithFullDetails]: + """ + Get a specific trace + + Parameters + ---------- + trace_id : str + The unique langfuse identifier of a trace + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[TraceWithFullDetails] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/traces/{jsonable_encoder(trace_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + TraceWithFullDetails, + parse_obj_as( + type_=TraceWithFullDetails, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + 
_response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def delete( + self, trace_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[DeleteTraceResponse]: + """ + Delete a specific trace + + Parameters + ---------- + trace_id : str + The unique langfuse identifier of the trace to delete + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeleteTraceResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/public/traces/{jsonable_encoder(trace_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteTraceResponse, + parse_obj_as( + type_=DeleteTraceResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: 
ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def list( + self, + *, + page: typing.Optional[int] = None, + limit: typing.Optional[int] = None, + user_id: typing.Optional[str] = None, + name: typing.Optional[str] = None, + session_id: typing.Optional[str] = None, + from_timestamp: typing.Optional[dt.datetime] = None, + to_timestamp: typing.Optional[dt.datetime] = None, + order_by: typing.Optional[str] = None, + tags: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + version: typing.Optional[str] = None, + release: typing.Optional[str] = None, + environment: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + fields: typing.Optional[str] = None, + filter: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Traces]: + """ + Get list of traces + + Parameters + ---------- + page : typing.Optional[int] + Page number, starts at 1 + + limit : typing.Optional[int] + Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. 
+ + user_id : typing.Optional[str] + + name : typing.Optional[str] + + session_id : typing.Optional[str] + + from_timestamp : typing.Optional[dt.datetime] + Optional filter to only include traces with a trace.timestamp on or after a certain datetime (ISO 8601) + + to_timestamp : typing.Optional[dt.datetime] + Optional filter to only include traces with a trace.timestamp before a certain datetime (ISO 8601) + + order_by : typing.Optional[str] + Format of the string [field].[asc/desc]. Fields: id, timestamp, name, userId, release, version, public, bookmarked, sessionId. Example: timestamp.asc + + tags : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Only traces that include all of these tags will be returned. + + version : typing.Optional[str] + Optional filter to only include traces with a certain version. + + release : typing.Optional[str] + Optional filter to only include traces with a certain release. + + environment : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Optional filter for traces where the environment is one of the provided values. + + fields : typing.Optional[str] + Comma-separated list of fields to include in the response. Available field groups: 'core' (always included), 'io' (input, output, metadata), 'scores', 'observations', 'metrics'. If not specified, all fields are returned. Example: 'core,scores,metrics'. Note: Excluded 'observations' or 'scores' fields return empty arrays; excluded 'metrics' returns -1 for 'totalCost' and 'latency'. + + filter : typing.Optional[str] + JSON string containing an array of filter conditions. When provided, this takes precedence over query parameter filters (userId, name, sessionId, tags, version, release, environment, fromTimestamp, toTimestamp). + + ## Filter Structure + Each filter condition has the following structure: + ```json + [ + { + "type": string, // Required. 
One of: "datetime", "string", "number", "stringOptions", "categoryOptions", "arrayOptions", "stringObject", "numberObject", "boolean", "null" + "column": string, // Required. Column to filter on (see available columns below) + "operator": string, // Required. Operator based on type: + // - datetime: ">", "<", ">=", "<=" + // - string: "=", "contains", "does not contain", "starts with", "ends with" + // - stringOptions: "any of", "none of" + // - categoryOptions: "any of", "none of" + // - arrayOptions: "any of", "none of", "all of" + // - number: "=", ">", "<", ">=", "<=" + // - stringObject: "=", "contains", "does not contain", "starts with", "ends with" + // - numberObject: "=", ">", "<", ">=", "<=" + // - boolean: "=", "<>" + // - null: "is null", "is not null" + "value": any, // Required (except for null type). Value to compare against. Type depends on filter type + "key": string // Required only for stringObject, numberObject, and categoryOptions types when filtering on nested fields like metadata + } + ] + ``` + + ## Available Columns + + ### Core Trace Fields + - `id` (string) - Trace ID + - `name` (string) - Trace name + - `timestamp` (datetime) - Trace timestamp + - `userId` (string) - User ID + - `sessionId` (string) - Session ID + - `environment` (string) - Environment tag + - `version` (string) - Version tag + - `release` (string) - Release tag + - `tags` (arrayOptions) - Array of tags + - `bookmarked` (boolean) - Bookmark status + + ### Structured Data + - `metadata` (stringObject/numberObject/categoryOptions) - Metadata key-value pairs. Use `key` parameter to filter on specific metadata keys. 
+ + ### Aggregated Metrics (from observations) + These metrics are aggregated from all observations within the trace: + - `latency` (number) - Latency in seconds (time from first observation start to last observation end) + - `inputTokens` (number) - Total input tokens across all observations + - `outputTokens` (number) - Total output tokens across all observations + - `totalTokens` (number) - Total tokens (alias: `tokens`) + - `inputCost` (number) - Total input cost in USD + - `outputCost` (number) - Total output cost in USD + - `totalCost` (number) - Total cost in USD + + ### Observation Level Aggregations + These fields aggregate observation levels within the trace: + - `level` (string) - Highest severity level (ERROR > WARNING > DEFAULT > DEBUG) + - `warningCount` (number) - Count of WARNING level observations + - `errorCount` (number) - Count of ERROR level observations + - `defaultCount` (number) - Count of DEFAULT level observations + - `debugCount` (number) - Count of DEBUG level observations + + ### Scores (requires join with scores table) + - `scores_avg` (number) - Average of numeric scores (alias: `scores`) + - `score_categories` (categoryOptions) - Categorical score values + + ## Filter Examples + ```json + [ + { + "type": "datetime", + "column": "timestamp", + "operator": ">=", + "value": "2024-01-01T00:00:00Z" + }, + { + "type": "string", + "column": "userId", + "operator": "=", + "value": "user-123" + }, + { + "type": "number", + "column": "totalCost", + "operator": ">=", + "value": 0.01 + }, + { + "type": "arrayOptions", + "column": "tags", + "operator": "all of", + "value": ["production", "critical"] + }, + { + "type": "stringObject", + "column": "metadata", + "key": "customer_tier", + "operator": "=", + "value": "enterprise" + } + ] + ``` + + ## Performance Notes + - Filtering on `userId`, `sessionId`, or `metadata` may enable skip indexes for better query performance + - Score filters require a join with the scores table and may impact query 
performance + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Traces] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/traces", + method="GET", + params={ + "page": page, + "limit": limit, + "userId": user_id, + "name": name, + "sessionId": session_id, + "fromTimestamp": serialize_datetime(from_timestamp) + if from_timestamp is not None + else None, + "toTimestamp": serialize_datetime(to_timestamp) + if to_timestamp is not None + else None, + "orderBy": order_by, + "tags": tags, + "version": version, + "release": release, + "environment": environment, + "fields": fields, + "filter": filter, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Traces, + parse_obj_as( + type_=Traces, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + 
parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) + + async def delete_multiple( + self, + *, + trace_ids: typing.Sequence[str], + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeleteTraceResponse]: + """ + Delete multiple traces + + Parameters + ---------- + trace_ids : typing.Sequence[str] + List of trace IDs to delete + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeleteTraceResponse] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/public/traces", + method="DELETE", + json={ + "traceIds": trace_ids, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeleteTraceResponse, + parse_obj_as( + type_=DeleteTraceResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise Error( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 403: + raise AccessDeniedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 405: + 
raise MethodNotAllowedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response.text, + ) + raise ApiError( + status_code=_response.status_code, + headers=dict(_response.headers), + body=_response_json, + ) diff --git a/langfuse/api/resources/trace/types/__init__.py b/langfuse/api/resources/trace/types/__init__.py index 929a1e047..3eab30d21 100644 --- a/langfuse/api/resources/trace/types/__init__.py +++ b/langfuse/api/resources/trace/types/__init__.py @@ -1,7 +1,46 @@ # This file was auto-generated by Fern from our API Definition. 
-from .delete_trace_response import DeleteTraceResponse -from .sort import Sort -from .traces import Traces +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .delete_trace_response import DeleteTraceResponse + from .sort import Sort + from .traces import Traces +_dynamic_imports: typing.Dict[str, str] = { + "DeleteTraceResponse": ".delete_trace_response", + "Sort": ".sort", + "Traces": ".traces", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["DeleteTraceResponse", "Sort", "Traces"] diff --git a/langfuse/api/resources/trace/types/delete_trace_response.py b/langfuse/api/resources/trace/types/delete_trace_response.py index 450c894e2..936577cdd 100644 --- a/langfuse/api/resources/trace/types/delete_trace_response.py +++ b/langfuse/api/resources/trace/types/delete_trace_response.py @@ -1,42 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DeleteTraceResponse(pydantic_v1.BaseModel): +class DeleteTraceResponse(UniversalBaseModel): message: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/trace/types/sort.py b/langfuse/api/resources/trace/types/sort.py index 76a5045b6..6cea1c58b 100644 --- a/langfuse/api/resources/trace/types/sort.py +++ b/langfuse/api/resources/trace/types/sort.py @@ -1,42 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class Sort(pydantic_v1.BaseModel): +class Sort(UniversalBaseModel): id: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/trace/types/traces.py b/langfuse/api/resources/trace/types/traces.py index 09f58978f..28386bfe4 100644 --- a/langfuse/api/resources/trace/types/traces.py +++ b/langfuse/api/resources/trace/types/traces.py @@ -1,45 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...commons.types.trace_with_details import TraceWithDetails from ...utils.resources.pagination.types.meta_response import MetaResponse -class Traces(pydantic_v1.BaseModel): +class Traces(UniversalBaseModel): data: typing.List[TraceWithDetails] meta: MetaResponse - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/langfuse/api/resources/utils/__init__.py b/langfuse/api/resources/utils/__init__.py index b4ac87b8a..96853f04d 100644 --- a/langfuse/api/resources/utils/__init__.py +++ b/langfuse/api/resources/utils/__init__.py @@ -1,5 +1,43 @@ # This file was auto-generated by Fern from our API Definition. 
-from .resources import MetaResponse, pagination +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .resources import MetaResponse, pagination +_dynamic_imports: typing.Dict[str, str] = { + "MetaResponse": ".resources", + "pagination": ".resources", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["MetaResponse", "pagination"] diff --git a/langfuse/api/resources/utils/resources/__init__.py b/langfuse/api/resources/utils/resources/__init__.py index 7e65ff270..b272f64b5 100644 --- a/langfuse/api/resources/utils/resources/__init__.py +++ b/langfuse/api/resources/utils/resources/__init__.py @@ -1,6 +1,44 @@ # This file was auto-generated by Fern from our API Definition. -from . import pagination -from .pagination import MetaResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from . 
import pagination + from .pagination import MetaResponse +_dynamic_imports: typing.Dict[str, str] = { + "MetaResponse": ".pagination", + "pagination": ".pagination", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["MetaResponse", "pagination"] diff --git a/langfuse/api/resources/utils/resources/pagination/__init__.py b/langfuse/api/resources/utils/resources/pagination/__init__.py index 9bd1e5f71..50821832d 100644 --- a/langfuse/api/resources/utils/resources/pagination/__init__.py +++ b/langfuse/api/resources/utils/resources/pagination/__init__.py @@ -1,5 +1,40 @@ # This file was auto-generated by Fern from our API Definition. 
-from .types import MetaResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import MetaResponse +_dynamic_imports: typing.Dict[str, str] = {"MetaResponse": ".types"} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["MetaResponse"] diff --git a/langfuse/api/resources/utils/resources/pagination/types/__init__.py b/langfuse/api/resources/utils/resources/pagination/types/__init__.py index 79bb6018e..5c0d83028 100644 --- a/langfuse/api/resources/utils/resources/pagination/types/__init__.py +++ b/langfuse/api/resources/utils/resources/pagination/types/__init__.py @@ -1,5 +1,40 @@ # This file was auto-generated by Fern from our API Definition. 
-from .meta_response import MetaResponse +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .meta_response import MetaResponse +_dynamic_imports: typing.Dict[str, str] = {"MetaResponse": ".meta_response"} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__}" + ) + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + __all__ = ["MetaResponse"] diff --git a/langfuse/api/resources/utils/resources/pagination/types/meta_response.py b/langfuse/api/resources/utils/resources/pagination/types/meta_response.py index 2d082c68f..3d1332102 100644 --- a/langfuse/api/resources/utils/resources/pagination/types/meta_response.py +++ b/langfuse/api/resources/utils/resources/pagination/types/meta_response.py @@ -1,62 +1,45 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ......core.datetime_utils import serialize_datetime -from ......core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import typing_extensions +from ......core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ......core.serialization import FieldMetadata -class MetaResponse(pydantic_v1.BaseModel): - page: int = pydantic_v1.Field() +class MetaResponse(UniversalBaseModel): + page: int = pydantic.Field() """ current page number """ - limit: int = pydantic_v1.Field() + limit: int = pydantic.Field() """ number of items per page """ - total_items: int = pydantic_v1.Field(alias="totalItems") + total_items: typing_extensions.Annotated[int, FieldMetadata(alias="totalItems")] = ( + pydantic.Field() + ) """ number of total items given the current filters/selection (if any) """ - total_pages: int = pydantic_v1.Field(alias="totalPages") + total_pages: typing_extensions.Annotated[int, FieldMetadata(alias="totalPages")] = ( + pydantic.Field() + ) """ number of total pages given the current limit """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + if IS_PYDANTIC_V2: + model_config: 
typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="allow", frozen=True + ) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/pyproject.toml b/pyproject.toml index fc5a0225a..a5c139884 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,9 +97,7 @@ ignore_missing_imports = true [[tool.mypy.overrides]] module = [ - "langfuse.api.resources.*", - "langfuse.api.core.*", - "langfuse.api.client" + "langfuse.api.*", ] ignore_errors = true