Here’s a minimal example using OpenAI. You can adapt the pattern to other providers like Anthropic or the Vercel AI SDK.
```ts
import { Persona } from '@kontext.dev/kontext-sdk';
import OpenAI from 'openai';

const persona = new Persona({ apiKey: process.env.KONTEXT_API_KEY });
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

async function run() {
  // 1) Fetch personalized context for a user
  const context = await persona.getContext({
    userId: 'user_123',
    task: 'chat',
  });

  // 2) Use the system prompt with your LLM
  const completion = await openai.chat.completions.create({
    model: 'gpt-4o',
    messages: [
      { role: 'system', content: context.systemPrompt },
      { role: 'user', content: 'Say hello using my preferences.' },
    ],
  });

  console.log(completion.choices[0].message);
}

run();
```
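As a rough illustration of adapting the pattern to another provider, here is a minimal sketch using the Anthropic SDK. The Kontext call (`persona.getContext`) is unchanged; the Anthropic-specific pieces (`@anthropic-ai/sdk`, `anthropic.messages.create`, the model ID) follow Anthropic's standard Messages API, which takes the system prompt as a top-level `system` parameter rather than a system message.

```ts
import { Persona } from '@kontext.dev/kontext-sdk';
import Anthropic from '@anthropic-ai/sdk';

const persona = new Persona({ apiKey: process.env.KONTEXT_API_KEY });
const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

async function run() {
  // 1) Fetch personalized context for a user (same as the OpenAI example)
  const context = await persona.getContext({
    userId: 'user_123',
    task: 'chat',
  });

  // 2) Pass the system prompt via Anthropic's top-level `system` parameter
  const message = await anthropic.messages.create({
    model: 'claude-3-5-sonnet-20241022', // example model ID; substitute the model you use
    max_tokens: 1024,
    system: context.systemPrompt,
    messages: [
      { role: 'user', content: 'Say hello using my preferences.' },
    ],
  });

  console.log(message.content);
}

run();
```

The same mapping applies to other providers: wherever the SDK accepts a system prompt, supply `context.systemPrompt` there.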