Documentation Index
Fetch the complete documentation index at: https://langwatch.ai/docs/llms.txt
Use this file to discover all available pages before exploring further.
LangWatch supports tracing Anthropic Claude API calls using the same otelopenai middleware used for OpenAI. Configure the client to point to Anthropic’s API endpoint.
Installation
go get github.com/langwatch/langwatch/sdk-go github.com/openai/openai-go
Usage
Set the LANGWATCH_API_KEY and ANTHROPIC_API_KEY environment variables before running, and point ANTHROPIC_BASE_URL at Anthropic's OpenAI-compatible endpoint (https://api.anthropic.com/v1/).
package main
import (
"context"
"log"
"os"
langwatch "github.com/langwatch/langwatch/sdk-go"
otelopenai "github.com/langwatch/langwatch/sdk-go/instrumentation/openai"
"github.com/openai/openai-go"
oaioption "github.com/openai/openai-go/option"
"go.opentelemetry.io/otel"
sdktrace "go.opentelemetry.io/otel/sdk/trace"
)
// main wires a LangWatch OTel exporter into an OpenAI-compatible client
// pointed at Anthropic's API, then performs one traced chat completion.
func main() {
	ctx := context.Background()

	// Set up the LangWatch exporter (reads LANGWATCH_API_KEY from the env).
	exporter, err := langwatch.NewDefaultExporter(ctx)
	if err != nil {
		log.Fatalf("failed to create exporter: %v", err)
	}
	tp := sdktrace.NewTracerProvider(sdktrace.WithBatcher(exporter))
	otel.SetTracerProvider(tp)
	// Critical: Shutdown flushes spans buffered by the batch processor.
	// NOTE: log.Fatalf calls os.Exit, which skips deferred calls — so from
	// here on we report errors with log.Printf + return instead of Fatalf,
	// otherwise the trace for the failing request would never be exported.
	defer func() {
		if err := tp.Shutdown(ctx); err != nil {
			log.Printf("tracer provider shutdown: %v", err)
		}
	}()

	// Allow ANTHROPIC_BASE_URL to override the endpoint, but default to
	// Anthropic's OpenAI-compatible API so the example works out of the box.
	baseURL := os.Getenv("ANTHROPIC_BASE_URL")
	if baseURL == "" {
		baseURL = "https://api.anthropic.com/v1/"
	}

	// Create an Anthropic client via the OpenAI-compatible API, with the
	// LangWatch middleware capturing request and response payloads.
	client := openai.NewClient(
		oaioption.WithAPIKey(os.Getenv("ANTHROPIC_API_KEY")),
		oaioption.WithBaseURL(baseURL),
		oaioption.WithMiddleware(otelopenai.Middleware("my-app",
			otelopenai.WithCaptureInput(),
			otelopenai.WithCaptureOutput(),
			otelopenai.WithGenAISystem("anthropic"),
		)),
	)

	response, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		// Anthropic model IDs use the "claude-<family>-<version>" form.
		Model: "claude-sonnet-4-5",
		Messages: []openai.ChatCompletionMessageParamUnion{
			openai.SystemMessage("You are a helpful assistant."),
			openai.UserMessage("Hello, Claude!"),
		},
	})
	if err != nil {
		log.Printf("chat completion failed: %v", err)
		return
	}
	// Guard against an empty choices slice before indexing.
	if len(response.Choices) == 0 {
		log.Printf("chat completion returned no choices")
		return
	}
	log.Printf("Response: %s", response.Choices[0].Message.Content)
}
The defer tp.Shutdown(ctx) call is essential. The batch span processor buffers traces in memory and only exports them periodically; without an explicit shutdown, any spans still buffered when your application exits are lost.