LangWatch supports tracing Grok (xAI) API calls with the same otelopenai middleware used for OpenAI, since xAI exposes an OpenAI-compatible API. Point the client at the xAI base URL and attach the middleware as usual.
Installation
go get github.com/langwatch/langwatch/sdk-go github.com/openai/openai-go
Usage
Set the LANGWATCH_API_KEY and XAI_API_KEY environment variables before running.
package main

import (
	"context"
	"log"
	"os"

	langwatch "github.com/langwatch/langwatch/sdk-go"
	otelopenai "github.com/langwatch/langwatch/sdk-go/instrumentation/openai"
	"github.com/openai/openai-go"
	oaioption "github.com/openai/openai-go/option"
	"go.opentelemetry.io/otel"
	sdktrace "go.opentelemetry.io/otel/sdk/trace"
)
func main() {
	ctx := context.Background()

	// Set up the LangWatch exporter (uses LANGWATCH_API_KEY)
	exporter, err := langwatch.NewDefaultExporter(ctx)
	if err != nil {
		log.Fatalf("failed to create exporter: %v", err)
	}
	tp := sdktrace.NewTracerProvider(sdktrace.WithBatcher(exporter))
	otel.SetTracerProvider(tp)
	defer tp.Shutdown(ctx) // Critical: ensures buffered traces are flushed before exit

	// Create a Grok client via xAI's OpenAI-compatible API, with the LangWatch middleware attached
	client := openai.NewClient(
		oaioption.WithAPIKey(os.Getenv("XAI_API_KEY")),
		oaioption.WithBaseURL("https://api.x.ai/v1"),
		oaioption.WithMiddleware(otelopenai.Middleware("my-app",
			otelopenai.WithCaptureInput(),
			otelopenai.WithCaptureOutput(),
			otelopenai.WithGenAISystem("xai"),
		)),
	)
	// Make a traced chat completion request
	response, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		Model: "grok-4-latest",
		Messages: []openai.ChatCompletionMessageParamUnion{
			openai.SystemMessage("You are a helpful assistant."),
			openai.UserMessage("Hello, Grok!"),
		},
	})
	if err != nil {
		log.Fatalf("Chat completion failed: %v", err)
	}

	log.Printf("Response: %s", response.Choices[0].Message.Content)
}
The defer tp.Shutdown(ctx) call is essential. Without it, traces buffered in memory will be lost when your application exits.
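If your process exits immediately after the last request, it can also help to flush with a bounded deadline of its own rather than reusing the request context. A minimal sketch of that pattern, assuming an arbitrary 5-second timeout and a "time" import added to the example above:

	// Hypothetical variant of the deferred shutdown above: flush with a bounded
	// deadline so traces are exported even if the original ctx is cancelled.
	// The 5-second timeout is an illustrative choice, not an SDK requirement.
	defer func() {
		shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
		defer cancel()
		if err := tp.Shutdown(shutdownCtx); err != nil {
			log.Printf("failed to flush traces: %v", err)
		}
	}()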