// Example: trace an OpenAI model with Observ.
// NOTE(fix): in the flattened source, the trailing `// Enable semantic caching`
// comment swallowed the closing `});` and every later statement; restored to
// proper multi-line form so the script is syntactically valid.
import { Observ } from "observ-sdk";
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

const observ = new Observ({
  apiKey: "your-observ-api-key",
  recall: true, // Enable semantic caching
});

// Wrap the model
const model = observ.wrap(openai("gpt-4"));

// Use it normally
const result = await generateText({
  model,
  prompt: "What is TypeScript?",
});

console.log(result.text);
Copy
// Example: trace an Anthropic model with Observ (recall enables semantic caching).
import { Observ } from "observ-sdk";
import { anthropic } from "@ai-sdk/anthropic";
import { generateText } from "ai";

// Initialize the Observ client, then wrap the provider model so every
// call made through it is traced.
const observ = new Observ({
  apiKey: "your-observ-api-key",
  recall: true,
});

const model = observ.wrap(anthropic("claude-sonnet-4-20250514"));

// The wrapped model is a drop-in replacement for generateText.
const result = await generateText({
  model,
  prompt: "What is TypeScript?",
});

console.log(result.text);
Copy
// Example: trace a Google Gemini model with Observ (recall enables semantic caching).
import { Observ } from "observ-sdk";
import { google } from "@ai-sdk/google";
import { generateText } from "ai";

// Create the Observ client and wrap the Gemini model; all requests made
// through the wrapped model are traced.
const observ = new Observ({
  apiKey: "your-observ-api-key",
  recall: true,
});

const model = observ.wrap(google("gemini-1.5-pro"));

// Use the wrapped model exactly like an unwrapped one.
const result = await generateText({
  model,
  prompt: "What is TypeScript?",
});

console.log(result.text);
Copy
// Example: trace a Mistral model with Observ (recall enables semantic caching).
import { Observ } from "observ-sdk";
import { mistral } from "@ai-sdk/mistral";
import { generateText } from "ai";

// Set up the Observ client, then wrap the Mistral provider model so calls
// are recorded by Observ.
const observ = new Observ({
  apiKey: "your-observ-api-key",
  recall: true,
});

const model = observ.wrap(mistral("mistral-large-latest"));

// The wrapped model slots straight into generateText.
const result = await generateText({
  model,
  prompt: "What is TypeScript?",
});

console.log(result.text);
Copy
// Example: trace a Cohere model with Observ (recall enables semantic caching).
import { Observ } from "observ-sdk";
import { cohere } from "@ai-sdk/cohere";
import { generateText } from "ai";

// Build the Observ client and wrap the Cohere model; wrapped calls are
// traced by Observ.
const observ = new Observ({
  apiKey: "your-observ-api-key",
  recall: true,
});

const model = observ.wrap(cohere("command-r-plus"));

// Call generateText with the wrapped model as usual.
const result = await generateText({
  model,
  prompt: "What is TypeScript?",
});

console.log(result.text);
// Example: streaming with a wrapped model.
// NOTE(fix): in the flattened source, the inline `//` comments swallowed the
// code that followed them on the same line (e.g. `// Use any wrapped modelconst
// stream = …`), leaving most of the script commented out; restored to proper
// multi-line form so the script is syntactically valid.
import { streamText } from "ai";

// Use any wrapped model
const stream = await streamText({
  model, // Your wrapped model
  prompt: "Write a detailed explanation of async/await",
});

// Stream chunks to the client
for await (const chunk of stream.textStream) {
  process.stdout.write(chunk);
}
Streaming requests are fully traced in Observ, including latency metrics for
each chunk.