Documentation Index
Fetch the complete documentation index at: https://docs.uselemma.ai/llms.txt
Use this file to discover all available pages before exploring further.
If you already export spans to Phoenix, you can add Lemma as another destination from the same instrumentation pipeline.
Langfuse is recommended for greenfield instrumentation, but Phoenix users do not need to switch instrumentation stacks to send traces to Lemma.
Setup
Export the same OpenInference/OpenTelemetry spans to Phoenix and Lemma.
Install OpenTelemetry exporters and any instrumentation you already use with Phoenix:

npm install @arizeai/openinference-instrumentation-openai @opentelemetry/api @opentelemetry/exporter-trace-otlp-proto @opentelemetry/instrumentation @opentelemetry/sdk-trace-base @opentelemetry/sdk-trace-node openai
// instrumentation.ts
import { OpenAIInstrumentation } from "@arizeai/openinference-instrumentation-openai";
import { registerInstrumentations } from "@opentelemetry/instrumentation";
import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto";
import { BatchSpanProcessor } from "@opentelemetry/sdk-trace-base";
import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";

/**
 * Read a required environment variable, failing fast with a clear error.
 * Without this, a missing variable silently produces `Authorization:
 * "Bearer undefined"` and (under strict TypeScript) `string | undefined`
 * is not assignable to the exporter's `Record<string, string>` headers.
 */
function requireEnv(name: string): string {
  const value = process.env[name];
  if (!value) {
    throw new Error(`Missing required environment variable: ${name}`);
  }
  return value;
}

// Phoenix auth is optional — e.g. a self-hosted Phoenix may not need a key.
const phoenixHeaders: Record<string, string> = process.env.PHOENIX_API_KEY
  ? { Authorization: `Bearer ${process.env.PHOENIX_API_KEY}` }
  : {};

// One provider, two batch processors: every span produced by the app is
// exported to both Phoenix and Lemma from the same pipeline.
const provider = new NodeTracerProvider({
  spanProcessors: [
    new BatchSpanProcessor(
      new OTLPTraceExporter({
        url: requireEnv("PHOENIX_OTLP_TRACES_URL"),
        headers: phoenixHeaders,
      }),
    ),
    new BatchSpanProcessor(
      new OTLPTraceExporter({
        // Must be the full OTLP traces endpoint, not just the API origin.
        url: requireEnv("LEMMA_BASE_URL"),
        headers: {
          Authorization: `Bearer ${requireEnv("LEMMA_API_KEY")}`,
          "X-Lemma-Project-ID": requireEnv("LEMMA_PROJECT_ID"),
        },
      }),
    ),
  ],
});

provider.register();

registerInstrumentations({
  instrumentations: [new OpenAIInstrumentation()],
});
import OpenAI from "openai";

// Any call made through the instrumented client now emits one span to
// both Phoenix and Lemma — no per-call changes needed.
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

await client.chat.completions.create({
  messages: [{ role: "user", content: "Summarize this ticket" }],
  model: "gpt-4o-mini",
});
Install OpenInference instrumentation and the OpenTelemetry exporter:

pip install openai openinference-instrumentation-openai opentelemetry-exporter-otlp opentelemetry-sdk
# instrumentation.py
import os

from openinference.instrumentation.openai import OpenAIInstrumentor
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

# One provider, two processors: every span is exported to both Phoenix
# and Lemma from the same instrumentation pipeline.
provider = TracerProvider()

# Phoenix auth is optional — e.g. a self-hosted Phoenix may not need a key.
phoenix_api_key = os.getenv("PHOENIX_API_KEY")
if phoenix_api_key:
    phoenix_headers = {"Authorization": f"Bearer {phoenix_api_key}"}
else:
    phoenix_headers = {}

phoenix_exporter = OTLPSpanExporter(
    endpoint=os.environ["PHOENIX_OTLP_TRACES_URL"],
    headers=phoenix_headers,
)
provider.add_span_processor(BatchSpanProcessor(phoenix_exporter))

# os.environ[...] raises KeyError up front if a required variable is unset.
lemma_exporter = OTLPSpanExporter(
    endpoint=os.environ["LEMMA_BASE_URL"],
    headers={
        "Authorization": f"Bearer {os.environ['LEMMA_API_KEY']}",
        "X-Lemma-Project-ID": os.environ["LEMMA_PROJECT_ID"],
    },
)
provider.add_span_processor(BatchSpanProcessor(lemma_exporter))

trace.set_tracer_provider(provider)
OpenAIInstrumentor().instrument(tracer_provider=provider)
from openai import OpenAI

# Any call made through the instrumented client now emits one span to
# both Phoenix and Lemma — no per-call changes needed.
client = OpenAI()
client.chat.completions.create(
    messages=[{"role": "user", "content": "Summarize this ticket"}],
    model="gpt-4o-mini",
)
Required environment variables
PHOENIX_OTLP_TRACES_URL
LEMMA_BASE_URL — the full OTLP traces endpoint (https://api.uselemma.ai/otel/v1/traces), not just the API origin
LEMMA_API_KEY
LEMMA_PROJECT_ID
References