Use the OpenStack OpenAI-compatible gateway as a drop-in backend for the OpenRouter provider of the Vercel AI SDK: only the `baseURL` and API key change, the rest of the code is unchanged.
import { createOpenRouter } from '@openrouter/ai-sdk-provider'
import { streamText } from 'ai'

// Build an OpenRouter-compatible provider instance that talks to the
// OpenStack gateway instead of OpenRouter's own endpoint.
const openrouter = createOpenRouter({
  // OpenStack's OpenAI-compatible endpoint replaces the provider default.
  baseURL: 'https://api.openstack.ai/v1',
  // Authenticate with an OpenStack API key rather than an OpenRouter key.
  apiKey: process.env.OPENSTACK_API_KEY,
  headers: {
    // Stable, pseudonymous end-user identifier forwarded on every request.
    'X-Openstack-User': 'user_123',
  },
})
// Select the model by its OpenRouter-style slug (provider/model:variant).
const model = openrouter('anthropic/claude-3.7-sonnet:thinking')

// Capture the result instead of discarding it: in the AI SDK the response
// streams lazily, so a snippet that never consumes the stream never
// receives any tokens. (`await` is kept for compatibility with SDK
// versions where streamText returns a promise; it is harmless otherwise.)
const result = await streamText({
  model,
  messages: [{ role: 'user', content: 'Hello' }],
})

// Drain the text stream to completion, echoing tokens as they arrive.
for await (const chunk of result.textStream) {
  process.stdout.write(chunk)
}