From 254923c7a740e61bf39e5c65f1934350d8b3f249 Mon Sep 17 00:00:00 2001 From: Ali Saleh Date: Tue, 2 Dec 2025 23:08:56 +0500 Subject: [PATCH] docs: Update Instructions For Phoenix Integration --- .../docs/observability/phoenix_integration.md | 79 +++++++++++++++---- 1 file changed, 64 insertions(+), 15 deletions(-) diff --git a/docs/my-website/docs/observability/phoenix_integration.md b/docs/my-website/docs/observability/phoenix_integration.md index ad3374399344..898d780668db 100644 --- a/docs/my-website/docs/observability/phoenix_integration.md +++ b/docs/my-website/docs/observability/phoenix_integration.md @@ -6,7 +6,7 @@ Open source tracing and evaluation platform :::tip -This is community maintained, Please make an issue if you run into a bug +This is community maintained. Please make an issue if you run into a bug: https://github.com/BerriAI/litellm ::: @@ -31,19 +31,16 @@ litellm.callbacks = ["arize_phoenix"] import litellm import os -os.environ["PHOENIX_API_KEY"] = "" # Necessary only using Phoenix Cloud -os.environ["PHOENIX_COLLECTOR_HTTP_ENDPOINT"] = "" # The URL of your Phoenix OSS instance e.g. http://localhost:6006/v1/traces -os.environ["PHOENIX_PROJECT_NAME"]="litellm" # OPTIONAL: you can configure project names, otherwise traces would go to "default" project +# Set env variables +os.environ["PHOENIX_API_KEY"] = "d0*****" # Set the Phoenix API key here. It is necessary only when using Phoenix Cloud. +os.environ["PHOENIX_COLLECTOR_HTTP_ENDPOINT"] = "https://app.phoenix.arize.com/s/<your-space-id>/v1/traces" # Set the URL of your Phoenix OSS instance, otherwise the tracer would use https://app.phoenix.arize.com/v1/traces for Phoenix Cloud. +os.environ["PHOENIX_PROJECT_NAME"] = "litellm" # Configure the project name, otherwise traces would go to "default" project. +os.environ['OPENAI_API_KEY'] = "fake-key" # Set the OpenAI API key here. 
-# This defaults to https://app.phoenix.arize.com/v1/traces for Phoenix Cloud - -# LLM API Keys -os.environ['OPENAI_API_KEY']="" - -# set arize as a callback, litellm will send the data to arize +# Set arize_phoenix as a callback & LiteLLM will send the data to Phoenix. litellm.callbacks = ["arize_phoenix"] - -# openai call + +# OpenAI call response = litellm.completion( model="gpt-3.5-turbo", messages=[ @@ -52,8 +49,9 @@ response = litellm.completion( ) ``` -### Using with LiteLLM Proxy +## Using with LiteLLM Proxy +1. Setup config.yaml ```yaml model_list: @@ -66,12 +64,63 @@ model_list: litellm_settings: callbacks: ["arize_phoenix"] +general_settings: + master_key: "sk-1234" + environment_variables: PHOENIX_API_KEY: "d0*****" - PHOENIX_COLLECTOR_ENDPOINT: "https://app.phoenix.arize.com/v1/traces" # OPTIONAL, for setting the GRPC endpoint - PHOENIX_COLLECTOR_HTTP_ENDPOINT: "https://app.phoenix.arize.com/v1/traces" # OPTIONAL, for setting the HTTP endpoint + PHOENIX_COLLECTOR_ENDPOINT: "https://app.phoenix.arize.com/s/<your-space-id>/v1/traces" # OPTIONAL - For setting the gRPC endpoint + PHOENIX_COLLECTOR_HTTP_ENDPOINT: "https://app.phoenix.arize.com/s/<your-space-id>/v1/traces" # OPTIONAL - For setting the HTTP endpoint +``` + +2. Start the proxy + +```bash +litellm --config config.yaml +``` + +3. Test it! + +```bash +curl -X POST 'http://0.0.0.0:4000/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-1234' \ +-d '{ "model": "gpt-4o", "messages": [{"role": "user", "content": "Hi 👋 - i'\''m openai"}]}' +``` + +## Supported Phoenix Endpoints +Phoenix now supports multiple deployment types. The correct endpoint depends on which version of Phoenix Cloud you are using. + +**Phoenix Cloud (With Spaces - New Version)** +Use this if your Phoenix URL contains the `/s/` path. + +```bash +https://app.phoenix.arize.com/s/<your-space-id>/v1/traces +``` + +**Phoenix Cloud (Legacy - Deprecated)** +Use this only if your deployment still shows the `/legacy` pattern. 
+ +```bash +https://app.phoenix.arize.com/legacy/v1/traces ``` +**Phoenix Cloud (Without Spaces - Old Version)** +Use this if your Phoenix Cloud URL does not contain the `/s/` or `/legacy` path. + +```bash +https://app.phoenix.arize.com/v1/traces +``` + +**Self-Hosted Phoenix (Local Instance)** +Use this when running Phoenix on your machine or a private server. + +```bash +http://localhost:6006/v1/traces +``` + +Depending on which Phoenix Cloud version or deployment you are using, you should set the corresponding endpoint in `PHOENIX_COLLECTOR_HTTP_ENDPOINT` or `PHOENIX_COLLECTOR_ENDPOINT`. + ## Support & Talk to Founders - [Schedule Demo 👋](https://calendly.com/d/4mp-gd3-k5k/berriai-1-1-onboarding-litellm-hosted-version)