about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/litellm/integrations/arize
diff options
context:
space:
mode:
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/integrations/arize')
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/integrations/arize/_utils.py        | 126
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/integrations/arize/arize.py         | 105
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/integrations/arize/arize_phoenix.py |  73
3 files changed, 304 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/litellm/integrations/arize/_utils.py b/.venv/lib/python3.12/site-packages/litellm/integrations/arize/_utils.py
new file mode 100644
index 00000000..487304cc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/integrations/arize/_utils.py
@@ -0,0 +1,126 @@
+from typing import TYPE_CHECKING, Any, Optional
+
+from litellm._logging import verbose_logger
+from litellm.litellm_core_utils.safe_json_dumps import safe_dumps
+from litellm.types.utils import StandardLoggingPayload
+
+if TYPE_CHECKING:
+    from opentelemetry.trace import Span as _Span
+
+    Span = _Span
+else:
+    Span = Any
+
+
def set_attributes(span: Span, kwargs, response_obj):
    """Populate *span* with Arize/OpenInference semantic-convention attributes.

    Extracts metadata, model parameters, input messages, response choices and
    token usage from the litellm logging payload (``kwargs`` /
    ``response_obj``) and sets them as span attributes. Any failure is logged
    and swallowed so that tracing can never break the underlying LLM call.

    Args:
        span: OpenTelemetry span to annotate.
        kwargs: litellm call kwargs; may contain ``model``, ``messages`` and
            a ``standard_logging_object`` payload.
        response_obj: litellm response; only used if it is dict-like
            (exposes ``.get``).
    """
    from litellm.integrations._types.open_inference import (
        MessageAttributes,
        OpenInferenceSpanKindValues,
        SpanAttributes,
    )

    try:
        standard_logging_payload: Optional[StandardLoggingPayload] = kwargs.get(
            "standard_logging_object"
        )

        #############################################
        ############ LLM CALL METADATA ##############
        #############################################

        if standard_logging_payload and (
            metadata := standard_logging_payload["metadata"]
        ):
            span.set_attribute(SpanAttributes.METADATA, safe_dumps(metadata))

        #############################################
        ########## LLM Request Attributes ###########
        #############################################

        # The name of the LLM a request is being made to
        if kwargs.get("model"):
            span.set_attribute(SpanAttributes.LLM_MODEL_NAME, kwargs.get("model"))

        span.set_attribute(
            SpanAttributes.OPENINFERENCE_SPAN_KIND,
            OpenInferenceSpanKindValues.LLM.value,
        )
        messages = kwargs.get("messages")

        # for /chat/completions
        # https://docs.arize.com/arize/large-language-models/tracing/semantic-conventions
        if messages:
            # INPUT_VALUE is the single "primary" input; use the last message.
            span.set_attribute(
                SpanAttributes.INPUT_VALUE,
                messages[-1].get("content", ""),
            )

            # LLM_INPUT_MESSAGES shows up under `input_messages` tab on the span page
            for idx, msg in enumerate(messages):
                # Set the role per message
                span.set_attribute(
                    f"{SpanAttributes.LLM_INPUT_MESSAGES}.{idx}.{MessageAttributes.MESSAGE_ROLE}",
                    msg["role"],
                )
                # Set the content per message
                span.set_attribute(
                    f"{SpanAttributes.LLM_INPUT_MESSAGES}.{idx}.{MessageAttributes.MESSAGE_CONTENT}",
                    msg.get("content", ""),
                )

        if standard_logging_payload and (
            model_params := standard_logging_payload["model_parameters"]
        ):
            # The Generative AI Provider: Azure, OpenAI, etc.
            span.set_attribute(
                SpanAttributes.LLM_INVOCATION_PARAMETERS, safe_dumps(model_params)
            )

            # A truthy user id implies it is not None, so a single check suffices
            # (previously a redundant `is not None` re-check of the same value).
            user_id = model_params.get("user")
            if user_id:
                span.set_attribute(SpanAttributes.USER_ID, user_id)

        #############################################
        ########## LLM Response Attributes ##########
        # https://docs.arize.com/arize/large-language-models/tracing/semantic-conventions
        #############################################
        if hasattr(response_obj, "get"):
            for choice in response_obj.get("choices", []):
                response_message = choice.get("message", {})
                span.set_attribute(
                    SpanAttributes.OUTPUT_VALUE, response_message.get("content", "")
                )

                # This shows up under `output_messages` tab on the span page.
                # NOTE(review): index 0 is hard-coded, so with n > 1 choices each
                # iteration overwrites the previous one and only the last choice
                # survives — confirm single-response is the intended contract.
                span.set_attribute(
                    f"{SpanAttributes.LLM_OUTPUT_MESSAGES}.0.{MessageAttributes.MESSAGE_ROLE}",
                    response_message.get("role"),
                )
                span.set_attribute(
                    f"{SpanAttributes.LLM_OUTPUT_MESSAGES}.0.{MessageAttributes.MESSAGE_CONTENT}",
                    response_message.get("content", ""),
                )

            usage = response_obj.get("usage")
            if usage:
                # Total tokens across prompt and completion.
                span.set_attribute(
                    SpanAttributes.LLM_TOKEN_COUNT_TOTAL,
                    usage.get("total_tokens"),
                )

                # The number of tokens used in the LLM response (completion).
                span.set_attribute(
                    SpanAttributes.LLM_TOKEN_COUNT_COMPLETION,
                    usage.get("completion_tokens"),
                )

                # The number of tokens used in the LLM prompt.
                span.set_attribute(
                    SpanAttributes.LLM_TOKEN_COUNT_PROMPT,
                    usage.get("prompt_tokens"),
                )
    except Exception as e:
        verbose_logger.error(f"Error setting arize attributes: {e}")
diff --git a/.venv/lib/python3.12/site-packages/litellm/integrations/arize/arize.py b/.venv/lib/python3.12/site-packages/litellm/integrations/arize/arize.py
new file mode 100644
index 00000000..7a0fb785
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/integrations/arize/arize.py
@@ -0,0 +1,105 @@
+"""
+arize AI is OTEL compatible
+
+this file has Arize ai specific helper functions
+"""
+
+import os
+from datetime import datetime
+from typing import TYPE_CHECKING, Any, Optional, Union
+
+from litellm.integrations.arize import _utils
+from litellm.integrations.opentelemetry import OpenTelemetry
+from litellm.types.integrations.arize import ArizeConfig
+from litellm.types.services import ServiceLoggerPayload
+
+if TYPE_CHECKING:
+    from opentelemetry.trace import Span as _Span
+
+    from litellm.types.integrations.arize import Protocol as _Protocol
+
+    Protocol = _Protocol
+    Span = _Span
+else:
+    Protocol = Any
+    Span = Any
+
+
class ArizeLogger(OpenTelemetry):
    """OpenTelemetry logger preconfigured for Arize AI.

    Emits Arize/OpenInference semantic-convention attributes for LLM calls
    and disables the service/proxy spans inherited from ``OpenTelemetry``
    that would add noise to Arize's LLM I/O traces.
    """

    def set_attributes(self, span: Span, kwargs, response_obj: Optional[Any]):
        """Delegate span attribute population to the Arize-specific helper."""
        ArizeLogger.set_arize_attributes(span, kwargs, response_obj)
        return

    @staticmethod
    def set_arize_attributes(span: Span, kwargs, response_obj):
        """Set Arize/OpenInference attributes on *span* from the call payload."""
        _utils.set_attributes(span, kwargs, response_obj)
        return

    @staticmethod
    def get_arize_config() -> ArizeConfig:
        """
        Helper function to get Arize configuration from environment variables.

        Endpoint selection: ``ARIZE_ENDPOINT`` (gRPC) takes precedence over
        ``ARIZE_HTTP_ENDPOINT`` (HTTP); if neither is set, Arize's hosted
        gRPC collector is used.

        Returns:
            ArizeConfig: A Pydantic model containing Arize configuration.
            ``space_key`` / ``api_key`` are ``None`` when the corresponding
            environment variables are unset; callers must validate them.
        """
        space_key = os.environ.get("ARIZE_SPACE_KEY")
        api_key = os.environ.get("ARIZE_API_KEY")

        grpc_endpoint = os.environ.get("ARIZE_ENDPOINT")
        http_endpoint = os.environ.get("ARIZE_HTTP_ENDPOINT")

        # Each branch assigns both values exactly once (no dead defaults).
        protocol: Protocol
        if grpc_endpoint:
            protocol, endpoint = "otlp_grpc", grpc_endpoint
        elif http_endpoint:
            protocol, endpoint = "otlp_http", http_endpoint
        else:
            # Fall back to Arize's hosted collector over gRPC.
            protocol, endpoint = "otlp_grpc", "https://otlp.arize.com/v1"

        return ArizeConfig(
            space_key=space_key,
            api_key=api_key,
            protocol=protocol,
            endpoint=endpoint,
        )

    async def async_service_success_hook(
        self,
        payload: ServiceLoggerPayload,
        parent_otel_span: Optional[Span] = None,
        start_time: Optional[Union[datetime, float]] = None,
        end_time: Optional[Union[datetime, float]] = None,
        event_metadata: Optional[dict] = None,
    ):
        """Arize is used mainly for LLM I/O tracing, sending router+caching metrics adds bloat to arize logs"""
        pass

    async def async_service_failure_hook(
        self,
        payload: ServiceLoggerPayload,
        error: Optional[str] = "",
        parent_otel_span: Optional[Span] = None,
        start_time: Optional[Union[datetime, float]] = None,
        end_time: Optional[Union[float, datetime]] = None,
        event_metadata: Optional[dict] = None,
    ):
        """Arize is used mainly for LLM I/O tracing, sending router+caching metrics adds bloat to arize logs"""
        pass

    def create_litellm_proxy_request_started_span(
        self,
        start_time: datetime,
        headers: dict,
    ):
        """Arize is used mainly for LLM I/O tracing, sending Proxy Server Request adds bloat to arize logs"""
        pass
diff --git a/.venv/lib/python3.12/site-packages/litellm/integrations/arize/arize_phoenix.py b/.venv/lib/python3.12/site-packages/litellm/integrations/arize/arize_phoenix.py
new file mode 100644
index 00000000..d7b7d581
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/integrations/arize/arize_phoenix.py
@@ -0,0 +1,73 @@
+import os
+from typing import TYPE_CHECKING, Any
+from litellm.integrations.arize import _utils
+from litellm._logging import verbose_logger
+from litellm.types.integrations.arize_phoenix import ArizePhoenixConfig
+
+if TYPE_CHECKING:
+    from .opentelemetry import OpenTelemetryConfig as _OpenTelemetryConfig
+    from litellm.types.integrations.arize import Protocol as _Protocol
+    from opentelemetry.trace import Span as _Span
+
+    Protocol = _Protocol
+    OpenTelemetryConfig = _OpenTelemetryConfig
+    Span = _Span
+else:
+    Protocol = Any
+    OpenTelemetryConfig = Any
+    Span = Any
+
+
+ARIZE_HOSTED_PHOENIX_ENDPOINT = "https://app.phoenix.arize.com/v1/traces"
+
class ArizePhoenixLogger:
    """Helpers for exporting litellm traces to Arize Phoenix."""

    @staticmethod
    def set_arize_phoenix_attributes(span: Span, kwargs, response_obj):
        """Set OpenInference span attributes for a single LLM call."""
        _utils.set_attributes(span, kwargs, response_obj)
        return

    @staticmethod
    def get_arize_phoenix_config() -> ArizePhoenixConfig:
        """
        Retrieves the Arize Phoenix configuration based on environment variables.

        An explicit HTTP collector endpoint takes precedence over a gRPC one;
        when neither is configured, the Arize-hosted Phoenix endpoint is used.

        Returns:
            ArizePhoenixConfig: A Pydantic model containing Arize Phoenix configuration.

        Raises:
            ValueError: If the hosted Phoenix endpoint is selected but
                PHOENIX_API_KEY is not set.
        """
        api_key = os.environ.get("PHOENIX_API_KEY", None)
        grpc_endpoint = os.environ.get("PHOENIX_COLLECTOR_ENDPOINT", None)
        http_endpoint = os.environ.get("PHOENIX_COLLECTOR_HTTP_ENDPOINT", None)

        protocol: Protocol
        if http_endpoint:
            protocol, endpoint = "otlp_http", http_endpoint
        elif grpc_endpoint:
            protocol, endpoint = "otlp_grpc", grpc_endpoint
        else:
            protocol, endpoint = "otlp_http", ARIZE_HOSTED_PHOENIX_ENDPOINT
            verbose_logger.debug(
                f"No PHOENIX_COLLECTOR_ENDPOINT or PHOENIX_COLLECTOR_HTTP_ENDPOINT found, using default endpoint with http: {ARIZE_HOSTED_PHOENIX_ENDPOINT}"
            )

        # The Arize-hosted Phoenix endpoint currently expects an `api_key=`
        # style auth header, which differs slightly from the Bearer scheme
        # used by self-hosted Phoenix deployments.
        otlp_auth_headers = None
        if endpoint == ARIZE_HOSTED_PHOENIX_ENDPOINT:
            if api_key is None:
                raise ValueError("PHOENIX_API_KEY must be set when the Arize hosted Phoenix endpoint is used.")
            otlp_auth_headers = f"api_key={api_key}"
        elif api_key is not None:
            # An api_key (auth) is optional for self-hosted Phoenix.
            otlp_auth_headers = f"Authorization=Bearer {api_key}"

        return ArizePhoenixConfig(
            otlp_auth_headers=otlp_auth_headers,
            protocol=protocol,
            endpoint=endpoint
        )
+