Langfuse Integration
Langfuse provides observability and tracing for LLM applications. Because LangMart exposes an OpenAI-compatible API, you can use Langfuse's drop-in OpenAI wrapper to automatically trace every request made through LangMart.
Installation
pip install langfuse openai

Configuration
import os
# Langfuse configuration
os.environ["LANGFUSE_SECRET_KEY"] = "sk-lf-..."
os.environ["LANGFUSE_PUBLIC_KEY"] = "pk-lf-..."
os.environ["LANGFUSE_HOST"] = "https://cloud.langfuse.com"
# LangMart configuration
os.environ["OPENAI_API_KEY"] = "your-langmart-api-key"Basic Usage
Basic Usage
from langfuse.openai import openai
# The API key is read from the OPENAI_API_KEY environment variable set above.
client = openai.OpenAI(
    base_url="https://api.langmart.ai/v1",
)
# Requests are automatically logged to Langfuse
response = client.chat.completions.create(
    model="anthropic/claude-opus-4.5",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Tell me a fun fact about space."},
    ],
    name="fun-fact-request",  # trace name shown in the Langfuse UI
)
print(response.choices[0].message.content)
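Beyond name, the Langfuse OpenAI wrapper lets you attach additional trace attributes to a request so traces are easier to filter and group later. The example below is a sketch using metadata, session_id, user_id, and tags; the exact set of supported attributes depends on your Langfuse SDK version, so verify the parameter names against the Langfuse docs.

response = client.chat.completions.create(
    model="anthropic/claude-opus-4.5",
    messages=[{"role": "user", "content": "Tell me a fun fact about the ocean."}],
    name="fun-fact-request",
    # The fields below are Langfuse trace attributes, not OpenAI parameters.
    metadata={"feature": "fun-facts"},  # arbitrary key-value context
    session_id="session-1234",          # groups related traces together
    user_id="user-5678",                # associates the trace with an end user
    tags=["docs-example"],              # free-form labels for filtering
)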
Advanced Tracing
from langfuse import observe
from langfuse.openai import openai
client = openai.OpenAI(base_url="https://api.langmart.ai/v1")
@observe()
def analyze_text(text: str):
    # Nested @observe-decorated calls appear as child spans under this trace.
    summary = summarize_text(text)
    sentiment = analyze_sentiment(summary)
    return {"summary": summary, "sentiment": sentiment}
@observe()
def summarize_text(text: str) -> str:
    response = client.chat.completions.create(
        model="openai/gpt-5.2",
        messages=[
            {"role": "system", "content": "Summarize concisely."},
            {"role": "user", "content": f"Summarize: {text}"},
        ],
        name="summarize-text",
    )
    return response.choices[0].message.content
@observe()
def analyze_sentiment(text: str) -> str:
    response = client.chat.completions.create(
        model="openai/gpt-5.2",
        messages=[
            {"role": "system", "content": "Analyze sentiment."},
            {"role": "user", "content": f"Sentiment: {text}"},
        ],
        name="analyze-sentiment",
    )
    return response.choices[0].message.content
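Calling the top-level function produces a single trace with nested spans for each decorated helper and each model call. The driver below is a minimal sketch; the get_client() and flush() calls assume a recent Langfuse Python SDK and are mainly needed in short-lived scripts where the process might exit before buffered events are sent.

from langfuse import get_client

result = analyze_text(
    "Langfuse traces every step of this pipeline, "
    "including both model calls and their parent span."
)
print(result["summary"])
print(result["sentiment"])

# Flush buffered events before the script exits (assumes the SDK exposes get_client()).
get_client().flush()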