[AI GATEWAY] Generate examples dynamically via Madlib style natural language form (#27250)
parent 0cee692eae
commit b5ed38d088
6 changed files with 633 additions and 256 deletions
504  src/components/ai-gateway/code-example-selector.tsx  Normal file
File diff suppressed because one or more lines are too long
112  src/components/ai-gateway/code-examples.astro  Normal file
@@ -0,0 +1,112 @@
---
import { CodeExample, CodeSelector, modelOptions, code, aiSDKUnifiedCode, aiSDKNativeCode, curlCode } from "./code-example-selector";
import { Code } from "~/components";
import { z } from "astro:schema";

const props = z.object({
  forceClient: z.string().optional(),
  forceAPI: z.string().optional(),
});

const { forceClient, forceAPI } = props.parse(Astro.props);
---
<style>
  .aig-code-example-container {
    background-color: var(--color-gray-200);
    border-color: var(--color-neutral-300);
    border-radius: 0.675rem;
    border-bottom-left-radius: 0.85rem;
    border-bottom-right-radius: 0.85rem;
    --selector-bg-color: var(--color-white);
    --selector-border-color: var(--color-neutral-300);
    --selector-text-color: var(--color-gray-900);
    --ec-brdRad: 0.675rem;
    --ec-brdCol: var(--color-neutral-300);
    &:where([data-theme=dark], [data-theme=dark] *) {
      background-color: var(--color-neutral-900);
      border-color: var(--color-neutral-700);
      --selector-bg-color: var(--color-neutral-700);
      --selector-border-color: var(--color-neutral-800);
      --selector-text-color: var(--color-gray-100);
      --ec-brdCol: var(--color-neutral-700);
    }
  }
</style>
<div class="border aig-code-example-container">
  <CodeSelector client:load forceClient={forceClient} forceApiType={forceAPI} />
  {(!forceClient || forceClient === "openai-js") && modelOptions.map((option) => (
    <CodeExample client:load provider={option.provider} clientType="openai-js" keyType="stored" apiType="unified" forceClientType={forceClient} forceApiType={forceAPI}>
      <Code lang="javascript" title="" code={code
        .replace("{provider}", option.provider)
        .replace("{model}", option.model)
        .replace(" {headerauth}\n","")}
      />
    </CodeExample>
  ))}
  {(!forceClient || forceClient === "openai-js") && modelOptions.map((option) => (
    <CodeExample client:load provider={option.provider} clientType="openai-js" keyType="in-request" apiType="unified" forceClientType={forceClient} forceApiType={forceAPI}>
      <Code lang="javascript" title="" code={code
        .replace("{provider}", option.provider)
        .replace("{model}", option.model)
        .replace("{cf_api_token}", "{" + option.provider + "_api_token}")
        .replace(" {headerauth}\n"," defaultHeaders: {\n // if gateway is authenticated\n \"cf-aig-authorization\": `Bearer {cf_api_token}`, \n },\n")}
      />
    </CodeExample>
  ))}
  {(!forceClient || forceClient === "aisdk") && modelOptions.map((option) => (
    <CodeExample client:load provider={option.provider} clientType="aisdk" keyType="stored" apiType="unified" forceClientType={forceClient} forceApiType={forceAPI}>
      <Code lang="javascript" title="" code={aiSDKUnifiedCode
        .replaceAll("{provider}", option.provider)
        .replaceAll("{apikey}", "")
        .replace("{model}", option.model)} />
    </CodeExample>
  ))}
  {(!forceClient || forceClient === "aisdk") && modelOptions.map((option) => (
    <CodeExample client:load provider={option.provider} clientType="aisdk" keyType="in-request" apiType="unified" forceClientType={forceClient} forceApiType={forceAPI}>
      <Code lang="javascript" title="" code={aiSDKUnifiedCode
        .replaceAll("{provider}", option.provider)
        .replaceAll("{apikey}", "{ apiKey: '{API_KEY}' }")
        .replace("{model}", option.model)} />
    </CodeExample>
  ))}

  {(!forceClient || forceClient === "aisdk") && modelOptions.map((option) => (
    <CodeExample client:load provider={option.provider} clientType="aisdk" keyType="stored" apiType="native" forceClientType={forceClient} forceApiType={forceAPI}>
      <Code lang="javascript" title="" code={aiSDKNativeCode
        .replaceAll("{provider}", option.aiSDK.provider)
        .replaceAll("{providerFactory}", option.aiSDK.providerFactory)
        .replaceAll("{providerUsage}", option.aiSDK.providerUsage)
        .replaceAll("{apikey}", "")
        .replace("{model}", option.model)} />
    </CodeExample>
  ))}
  {(!forceClient || forceClient === "aisdk") && modelOptions.map((option) => (
    <CodeExample client:load provider={option.provider} clientType="aisdk" keyType="in-request" apiType="native" forceClientType={forceClient} forceApiType={forceAPI}>
      <Code lang="javascript" title="" code={aiSDKNativeCode
        .replaceAll("{provider}", option.aiSDK.provider)
        .replaceAll("{providerFactory}", option.aiSDK.providerFactory)
        .replaceAll("{providerUsage}", option.aiSDK.providerUsage)
        .replaceAll("{apikey}", "{ apiKey: '{API_KEY}' }")
        .replace("{model}", option.model)} />
    </CodeExample>
  ))}

  {(!forceClient || forceClient === "curl") && modelOptions.map((option) => (
    <CodeExample client:load provider={option.provider} clientType="curl" keyType="stored" apiType="unified" forceClientType={forceClient} forceApiType={forceAPI}>
      <Code title="" lang="curl" code={curlCode
        .replaceAll("{provider}", option.provider)
        .replace("{model}", option.model)
        .replace("{headerauth}\n","")}
      />
    </CodeExample>
  ))}
  {(!forceClient || forceClient === "curl") && modelOptions.map((option) => (
    <CodeExample client:load provider={option.provider} clientType="curl" keyType="in-request" apiType="unified" forceClientType={forceClient} forceApiType={forceAPI}>
      <Code title="" lang="curl" code={curlCode
        .replaceAll("{provider}", option.provider)
        .replace("{model}", option.model)
        .replace("{headerauth}\n"," --header 'Authorization: Bearer {" + option.provider + "_api_token}' \\\n")} />
    </CodeExample>
  ))}
</div>

@@ -15,13 +15,13 @@ import {
  Tabs,
  Badge,
} from "~/components";

import CodeSnippets from "~/components/ai-gateway/code-examples.astro";

In this guide, you will learn how to create and use your first AI Gateway.

<Render file="create-gateway" product="ai-gateway" />

### Authenticated gateway
### Authenticated gateway 🔒

When you enable authentication on a gateway, each request must include a valid Cloudflare API token, adding an extra layer of security. We recommend using an authenticated gateway to prevent unauthorized access. [Learn more](/ai-gateway/configuration/authentication/).
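
For example, a client talking to an authenticated gateway sends the Cloudflare token in the `cf-aig-authorization` header alongside its provider key. A minimal sketch with the OpenAI JS SDK, using placeholder `{account_id}`, `{gateway_id}`, and token values:

```js
import OpenAI from "openai";

// Authenticated gateway: the provider key authenticates with the upstream
// provider, while the Cloudflare API token authorizes the gateway itself.
const client = new OpenAI({
  apiKey: "YOUR_PROVIDER_API_KEY",
  defaultHeaders: {
    "cf-aig-authorization": `Bearer {cf_api_token}`,
  },
  baseURL: "https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat",
});
```
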
@@ -30,22 +30,22 @@ When you enable authentication on gateway each request is required to include a
Authenticate with your upstream AI provider using one of the following options:

- **Unified Billing:** Use the AI Gateway billing to pay for and authenticate your inference requests. Refer to [Unified Billing](/ai-gateway/features/unified-billing/).
- **BYOK (Store Keys):** Store your API Keys with Cloudflare, and AI Gateway will include them at runtime. Refer to [BYOK](/ai-gateway/configuration/bring-your-own-keys/).
- **Request headers:** Include your provider key in the request headers as you normally would (for example, `Authorization: Bearer <OPENAI_API_KEY>`).
- **BYOK (Store Keys):** Store your own provider API Keys with Cloudflare, and AI Gateway will include them at runtime. Refer to [BYOK](/ai-gateway/configuration/bring-your-own-keys/).
- **Request headers:** Include your provider API Key in the request headers as you normally would (for example, `Authorization: Bearer <OPENAI_API_KEY>`).

## Integration Options

### Unified API (OpenAI-Compatible) Endpoint
### Unified API Endpoint

<Badge text="recommended" variant="success" size="small" />
<Badge text="OpenAI Compatible" variant="tip" size="small" /><Badge text="Recommended" variant="success" size="small" />
<br />
<br />

The easiest way to get started with AI Gateway is through our OpenAI-compatible `/chat/completions` endpoint. This allows you to use existing OpenAI SDKs and tools with minimal code changes while gaining access to multiple AI providers.

```txt ins="{account_id}" ins="{gateway_name}"
https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_name}/compat/chat/completions
```

**Key benefits:**

@@ -53,69 +53,10 @@ https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_name}/compat/chat/com
- Switch between providers by changing the `model` parameter
- Dynamic Routing - Define complex routing scenarios that require conditional logic, conduct A/B tests, set rate or budget limits, and more

#### Example:

<Details header="With Key in Request">
<Tabs>
<TabItem label="With Authenticated Gateway">
```js title=""
import OpenAI from "openai";

const client = new OpenAI({
  apiKey: "YOUR_PROVIDER_API_KEY",
  defaultHeaders: {
    "cf-aig-authorization": `Bearer {cf_api_token}`,
  },
  baseURL:
    "https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat",
});

// Use different providers by changing the model parameter
const response = await client.chat.completions.create({
  model: "google-ai-studio/gemini-2.5-flash", // or "openai/gpt-5-mini", "anthropic/claude-sonnet-4-5"
  messages: [{ role: "user", content: "Hello, world!" }],
});
```
</TabItem>
<TabItem label="Unauthenticated Gateway">
```js title=""
import OpenAI from "openai";

const client = new OpenAI({
  apiKey: "YOUR_PROVIDER_API_KEY",
  baseURL:
    "https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat",
});

// Use different providers by changing the model parameter
const response = await client.chat.completions.create({
  model: "google-ai-studio/gemini-2.5-flash", // or "openai/gpt-5-mini", "anthropic/claude-sonnet-4-5"
  messages: [{ role: "user", content: "Hello, world!" }],
});
```
</TabItem>
</Tabs>
</Details>
<Details header="With Stored Keys (BYOK) / Unified Billing" open>
```js title="OpenAI JS SDK"
import OpenAI from "openai";

const client = new OpenAI({
  apiKey: "{cf_api_token}",
  baseURL:
    "https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat",
});

// Ensure either your LLM Keys are stored with BYOK
// or Unified Billing has credits
const response = await client.chat.completions.create({
  // Use different providers by changing the model parameter
  model: "google-ai-studio/gemini-2.5-flash", // or "openai/gpt-5-mini"
  messages: [{ role: "user", content: "Hello, world!" }],
});
```
</Details>

<CodeSnippets forceAPI="unified" />

Refer to [Unified API](/ai-gateway/usage/chat-completion/) to learn more about OpenAI compatibility.

@@ -10,6 +10,7 @@ import {
  Tabs,
  TabItem
} from "~/components";
import CodeSnippets from "~/components/ai-gateway/code-examples.astro";

The [Vercel AI SDK](https://sdk.vercel.ai/) is a TypeScript library for building AI applications. The SDK supports many different AI providers, tools for streaming completions, and more.
To use Cloudflare AI Gateway with Vercel AI SDK, you will need to use the `ai-gateway-provider` package.

@@ -20,71 +21,9 @@ To use Cloudflare AI Gateway with Vercel AI SDK, you will need to use the `ai-ga
npm install ai-gateway-provider
```
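
Once installed, the package wraps a provider model in a gateway-aware provider. A minimal sketch, assuming an authenticated gateway and placeholder account, gateway, and key values (the generated examples below cover each configuration):

```js
import { createAiGateway } from 'ai-gateway-provider';
import { createOpenAI } from 'ai-gateway-provider/providers/openai';
import { generateText } from "ai";

// Point the provider at your gateway; apiKey is only needed if the
// gateway itself is authenticated.
const aigateway = createAiGateway({
  accountId: "{CLOUDFLARE_ACCOUNT_ID}",
  gateway: '{GATEWAY_NAME}',
  apiKey: '{CF_AIG_TOKEN}',
});

const openai = createOpenAI({ apiKey: '{OPENAI_API_KEY}' });

// Wrap the model so the request is routed through AI Gateway.
const { text } = await generateText({
  model: aigateway(openai.chat("gpt-5.1")),
  prompt: 'Write a vegetarian lasagna recipe for 4 people.',
});
```
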
## Examples

<Details header="With Key in Request">
<Tabs>
<TabItem label="With Authenticated Gateway">
```js title=""
import { createAiGateway } from 'ai-gateway-provider';
import { createOpenAI } from 'ai-gateway-provider/providers/openai';
import { generateText } from "ai";

const aigateway = createAiGateway({
  accountId: "{CLOUDFLARE_ACCOUNT_ID}",
  gateway: '{GATEWAY_NAME}',
  apiKey: '{CF_AIG_TOKEN}',
});

const openai = createOpenAI({ apiKey: '{OPENAI_API_KEY}' });

const { text } = await generateText({
  model: aigateway(openai.chat("gpt-5.1")),
  prompt: 'Write a vegetarian lasagna recipe for 4 people.',
});
```
</TabItem>
<TabItem label="Unauthenticated Gateway">
```js title=""
import { createAiGateway } from 'ai-gateway-provider';
import { createOpenAI } from 'ai-gateway-provider/providers/openai';
import { generateText } from "ai";

const aigateway = createAiGateway({
  accountId: "{CLOUDFLARE_ACCOUNT_ID}",
  gateway: '{GATEWAY_NAME}',
});

const openai = createOpenAI({ apiKey: '{OPENAI_API_KEY}' });

const { text } = await generateText({
  model: aigateway(openai.chat("gpt-5.1")),
  prompt: 'Write a vegetarian lasagna recipe for 4 people.',
});
```
</TabItem>
</Tabs>
</Details>
<Details header="With Stored Keys (BYOK) / Unified Billing" open>
```js title=""
import { createAiGateway } from 'ai-gateway-provider';
import { createOpenAI } from 'ai-gateway-provider/providers/openai';
import { generateText } from "ai";

const aigateway = createAiGateway({
  accountId: "{CLOUDFLARE_ACCOUNT_ID}",
  gateway: '{GATEWAY_NAME}',
  apiKey: '{CF_AIG_TOKEN}',
});

const openai = createOpenAI();

const { text } = await generateText({
  model: aigateway(openai.chat("gpt-5.1")),
  prompt: 'Write a vegetarian lasagna recipe for 4 people.',
});
```
</Details>
<CodeSnippets forceClient="aisdk" />

### Fallback Providers

@@ -12,6 +12,7 @@ import {
  Tabs,
  TabItem
} from "~/components";
import CodeSnippets from "~/components/ai-gateway/code-examples.astro";

Cloudflare's AI Gateway offers an OpenAI-compatible `/chat/completions` endpoint, enabling integration with multiple AI providers using a single URL. This feature simplifies the integration process, allowing for seamless switching between different models without significant code modifications.
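
For instance, moving a request from one provider to another is just a change to the `model` string. A minimal sketch with the OpenAI JS SDK and placeholder IDs, assuming keys are stored (BYOK) or Unified Billing is enabled:

```js
import OpenAI from "openai";

// One baseURL for every provider; only the model string changes.
const client = new OpenAI({
  apiKey: "{cf_api_token}",
  baseURL: "https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat",
});

const response = await client.chat.completions.create({
  model: "openai/gpt-5-mini", // or "google-ai-studio/gemini-2.5-flash", "anthropic/claude-sonnet-4-5"
  messages: [{ role: "user", content: "Hello, world!" }],
});
```
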
@@ -35,125 +36,7 @@ Specify the model using `{provider}/{model}` format. For example:

## Examples

### OpenAI SDK

<Details header="With Key in Request">
<Tabs>
<TabItem label="With Authenticated Gateway">
```js title=""
import OpenAI from "openai";

const client = new OpenAI({
  apiKey: "YOUR_PROVIDER_API_KEY",
  defaultHeaders: {
    "cf-aig-authorization": `Bearer {cf_api_token}`,
  },
  baseURL:
    "https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat",
});

// Use different providers by changing the model parameter
const response = await client.chat.completions.create({
  model: "google-ai-studio/gemini-2.5-flash", // or "openai/gpt-5-mini", "anthropic/claude-sonnet-4-5"
  messages: [{ role: "user", content: "Hello, world!" }],
});
```
</TabItem>
<TabItem label="Unauthenticated Gateway">
```js title=""
import OpenAI from "openai";

const client = new OpenAI({
  apiKey: "YOUR_PROVIDER_API_KEY",
  baseURL:
    "https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat",
});

// Use different providers by changing the model parameter
const response = await client.chat.completions.create({
  model: "google-ai-studio/gemini-2.5-flash", // or "openai/gpt-5-mini", "anthropic/claude-sonnet-4-5"
  messages: [{ role: "user", content: "Hello, world!" }],
});
```
</TabItem>
</Tabs>
</Details>
<Details header="With Stored Keys (BYOK) / Unified Billing" open>
```js title="OpenAI JS SDK"
import OpenAI from "openai";

const client = new OpenAI({
  apiKey: "{cf_api_token}",
  baseURL:
    "https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat",
});

// Ensure either your LLM Keys are stored with BYOK
// or Unified Billing has credits
const response = await client.chat.completions.create({
  // Use different providers by changing the model parameter
  model: "google-ai-studio/gemini-2.5-flash", // or "openai/gpt-5-mini"
  messages: [{ role: "user", content: "Hello, world!" }],
});
```
</Details>

### cURL

<Details header="With Key in Request">
<Tabs>
<TabItem label="With Authenticated Gateway">
```bash title=""
curl -X POST https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat/chat/completions \
  --header 'Authorization: Bearer {GOOGLE_GENERATIVE_AI_API_KEY}' \
  --header 'cf-aig-authorization: Bearer {CF_AIG_TOKEN}' \
  --header 'Content-Type: application/json' \
  --data '{
    "model": "google-ai-studio/gemini-2.5-flash",
    "messages": [
      {
        "role": "user",
        "content": "What is Cloudflare?"
      }
    ]
  }'
```
</TabItem>
<TabItem label="Unauthenticated Gateway">
```bash title=""
curl -X POST https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat/chat/completions \
  --header 'Authorization: Bearer {GOOGLE_GENERATIVE_AI_API_KEY}' \
  --header 'Content-Type: application/json' \
  --data '{
    "model": "google-ai-studio/gemini-2.5-flash",
    "messages": [
      {
        "role": "user",
        "content": "What is Cloudflare?"
      }
    ]
  }'
```
</TabItem>
</Tabs>
</Details>
<Details header="With Stored Keys (BYOK) / Unified Billing" open>
```bash title=""
curl -X POST https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/compat/chat/completions \
  --header 'cf-aig-authorization: Bearer {CF_AIG_TOKEN}' \
  --header 'Content-Type: application/json' \
  --data '{
    "model": "google-ai-studio/gemini-2.5-flash",
    "messages": [
      {
        "role": "user",
        "content": "What is Cloudflare?"
      }
    ]
  }'
```
</Details>
<CodeSnippets forceAPI="unified" />

## Supported Providers

@@ -2,14 +2,12 @@
{}
---

import { TabItem, Tabs, LinkButton } from "~/components";
import { TabItem, Tabs, DashButton } from "~/components";

<Tabs syncKey="dashPlusAPI">
<TabItem label="Dashboard">

<LinkButton href="https://dash.cloudflare.com/?to=/:account/ai/ai-gateway#create">
Create a Gateway
</LinkButton>
<DashButton url="/?to=/:account/ai/ai-gateway" />

1. Log into the [Cloudflare dashboard](https://dash.cloudflare.com/) and select your account.
2. Go to **AI** > **AI Gateway**.