feat(api): update via SDK Studio (#1181)

stainless-app[bot] 2024-07-15 01:23:53 +00:00 committed by stainless-bot
parent c08fb0db23
commit efafb96456
4 changed files with 684 additions and 94 deletions

View file

@@ -1,2 +1,2 @@
configured_endpoints: 1256
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cloudflare%2Fcloudflare-9f6e9da01b27f4f387991ca14ecafe0c42a356cc3c47b269e5f8b4f6cd0ed700.yml
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cloudflare%2Fcloudflare-cb76af21f6fcf485b7e41586c3076cd45d25d6d04971c77ec814523b894dcb97.yml

View file

@@ -320,12 +320,64 @@ class AIResource(SyncAPIResource):
model_name: str,
*,
account_id: str,
prompt: str,
frequency_penalty: float | NotGiven = NOT_GIVEN,
lora: str | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
messages: Iterable[ai_run_params.TextGenerationMessage] | NotGiven = NOT_GIVEN,
prompt: str | NotGiven = NOT_GIVEN,
presence_penalty: float | NotGiven = NOT_GIVEN,
raw: bool | NotGiven = NOT_GIVEN,
repetition_penalty: float | NotGiven = NOT_GIVEN,
seed: int | NotGiven = NOT_GIVEN,
stream: bool | NotGiven = NOT_GIVEN,
temperature: float | NotGiven = NOT_GIVEN,
top_k: int | NotGiven = NOT_GIVEN,
top_p: float | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Optional[AIRunResponse]:
"""
This endpoint provides users with the capability to run specific AI models
on demand.
By submitting the required input data, users can receive real-time predictions
or results generated by the chosen AI model. The endpoint supports various AI
model types, ensuring flexibility and adaptability for diverse use cases.
Model-specific inputs are available in the
[Cloudflare Docs](https://developers.cloudflare.com/workers-ai/models/).
Args:
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
extra_body: Add additional JSON properties to the request
timeout: Override the client-level default timeout for this request, in seconds
"""
...
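
As a minimal usage sketch (not part of this diff), the prompt-based overload above can be called roughly as follows; the API token and model slug are placeholder assumptions, while the parameter names come from the signature shown:

from cloudflare import Cloudflare

client = Cloudflare(api_token="YOUR_API_TOKEN")  # placeholder credential

result = client.workers.ai.run(
    "@cf/meta/llama-2-7b-chat-int8",  # example model slug, not taken from this diff
    account_id="023e105f4ecef8ad9ca31a8372d0c353",
    prompt="Write a haiku about edge computing",
    max_tokens=256,
    temperature=0.7,
)
print(result)
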
@overload
def run(
self,
model_name: str,
*,
account_id: str,
messages: Iterable[ai_run_params.Variant8Message],
frequency_penalty: float | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
presence_penalty: float | NotGiven = NOT_GIVEN,
repetition_penalty: float | NotGiven = NOT_GIVEN,
seed: int | NotGiven = NOT_GIVEN,
stream: bool | NotGiven = NOT_GIVEN,
temperature: float | NotGiven = NOT_GIVEN,
tools: Iterable[ai_run_params.Variant8Tool] | NotGiven = NOT_GIVEN,
top_k: int | NotGiven = NOT_GIVEN,
top_p: float | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -478,6 +530,7 @@ class AIResource(SyncAPIResource):
["account_id", "audio"],
["account_id", "image"],
["account_id"],
["account_id", "messages"],
["account_id", "target_lang", "text"],
["account_id", "input_text"],
)
@@ -502,16 +555,22 @@ class AIResource(SyncAPIResource):
strength: float | NotGiven = NOT_GIVEN,
width: int | NotGiven = NOT_GIVEN,
audio: Iterable[float] | NotGiven = NOT_GIVEN,
frequency_penalty: float | NotGiven = NOT_GIVEN,
lora: str | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
messages: Iterable[ai_run_params.TextGenerationMessage] | NotGiven = NOT_GIVEN,
presence_penalty: float | NotGiven = NOT_GIVEN,
raw: bool | NotGiven = NOT_GIVEN,
repetition_penalty: float | NotGiven = NOT_GIVEN,
stream: bool | NotGiven = NOT_GIVEN,
temperature: float | NotGiven = NOT_GIVEN,
top_k: int | NotGiven = NOT_GIVEN,
top_p: float | NotGiven = NOT_GIVEN,
messages: Iterable[ai_run_params.Variant8Message] | NotGiven = NOT_GIVEN,
tools: Iterable[ai_run_params.Variant8Tool] | NotGiven = NOT_GIVEN,
target_lang: str | NotGiven = NOT_GIVEN,
source_lang: str | NotGiven = NOT_GIVEN,
input_text: str | NotGiven = NOT_GIVEN,
max_length: int | NotGiven = NOT_GIVEN,
temperature: float | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -825,12 +884,64 @@ class AsyncAIResource(AsyncAPIResource):
model_name: str,
*,
account_id: str,
prompt: str,
frequency_penalty: float | NotGiven = NOT_GIVEN,
lora: str | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
messages: Iterable[ai_run_params.TextGenerationMessage] | NotGiven = NOT_GIVEN,
prompt: str | NotGiven = NOT_GIVEN,
presence_penalty: float | NotGiven = NOT_GIVEN,
raw: bool | NotGiven = NOT_GIVEN,
repetition_penalty: float | NotGiven = NOT_GIVEN,
seed: int | NotGiven = NOT_GIVEN,
stream: bool | NotGiven = NOT_GIVEN,
temperature: float | NotGiven = NOT_GIVEN,
top_k: int | NotGiven = NOT_GIVEN,
top_p: float | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Optional[AIRunResponse]:
"""
This endpoint provides users with the capability to run specific AI models
on demand.
By submitting the required input data, users can receive real-time predictions
or results generated by the chosen AI model. The endpoint supports various AI
model types, ensuring flexibility and adaptability for diverse use cases.
Model-specific inputs are available in the
[Cloudflare Docs](https://developers.cloudflare.com/workers-ai/models/).
Args:
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
extra_body: Add additional JSON properties to the request
timeout: Override the client-level default timeout for this request, in seconds
"""
...
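
The async resource mirrors the sync one; a minimal sketch of the same call with AsyncCloudflare (placeholder token and model slug, not part of this diff):

import asyncio

from cloudflare import AsyncCloudflare

async def main() -> None:
    client = AsyncCloudflare(api_token="YOUR_API_TOKEN")  # placeholder credential
    result = await client.workers.ai.run(
        "@cf/meta/llama-2-7b-chat-int8",  # example model slug, not taken from this diff
        account_id="023e105f4ecef8ad9ca31a8372d0c353",
        prompt="Write a haiku about edge computing",
    )
    print(result)

asyncio.run(main())
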
@overload
async def run(
self,
model_name: str,
*,
account_id: str,
messages: Iterable[ai_run_params.Variant8Message],
frequency_penalty: float | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
presence_penalty: float | NotGiven = NOT_GIVEN,
repetition_penalty: float | NotGiven = NOT_GIVEN,
seed: int | NotGiven = NOT_GIVEN,
stream: bool | NotGiven = NOT_GIVEN,
temperature: float | NotGiven = NOT_GIVEN,
tools: Iterable[ai_run_params.Variant8Tool] | NotGiven = NOT_GIVEN,
top_k: int | NotGiven = NOT_GIVEN,
top_p: float | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -983,6 +1094,7 @@ class AsyncAIResource(AsyncAPIResource):
["account_id", "audio"],
["account_id", "image"],
["account_id"],
["account_id", "messages"],
["account_id", "target_lang", "text"],
["account_id", "input_text"],
)
@@ -1007,16 +1119,22 @@ class AsyncAIResource(AsyncAPIResource):
strength: float | NotGiven = NOT_GIVEN,
width: int | NotGiven = NOT_GIVEN,
audio: Iterable[float] | NotGiven = NOT_GIVEN,
frequency_penalty: float | NotGiven = NOT_GIVEN,
lora: str | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
messages: Iterable[ai_run_params.TextGenerationMessage] | NotGiven = NOT_GIVEN,
presence_penalty: float | NotGiven = NOT_GIVEN,
raw: bool | NotGiven = NOT_GIVEN,
repetition_penalty: float | NotGiven = NOT_GIVEN,
stream: bool | NotGiven = NOT_GIVEN,
temperature: float | NotGiven = NOT_GIVEN,
top_k: int | NotGiven = NOT_GIVEN,
top_p: float | NotGiven = NOT_GIVEN,
messages: Iterable[ai_run_params.Variant8Message] | NotGiven = NOT_GIVEN,
tools: Iterable[ai_run_params.Variant8Tool] | NotGiven = NOT_GIVEN,
target_lang: str | NotGiven = NOT_GIVEN,
source_lang: str | NotGiven = NOT_GIVEN,
input_text: str | NotGiven = NOT_GIVEN,
max_length: int | NotGiven = NOT_GIVEN,
temperature: float | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,

View file

@@ -2,7 +2,7 @@
from __future__ import annotations
from typing import List, Union, Iterable
from typing import Dict, List, Union, Iterable
from typing_extensions import Required, TypedDict
__all__ = [
@@ -14,8 +14,13 @@ __all__ = [
"AutomaticSpeechRecognition",
"ImageClassification",
"ObjectDetection",
"TextGeneration",
"TextGenerationMessage",
"Variant7",
"Variant8",
"Variant8Message",
"Variant8Tool",
"Variant8ToolFunction",
"Variant8ToolFunctionParameters",
"Variant8ToolFunctionParametersProperties",
"Translation",
"Summarization",
"ImageToText",
@@ -89,28 +94,94 @@ class ObjectDetection(TypedDict, total=False):
image: Iterable[float]
class TextGeneration(TypedDict, total=False):
class Variant7(TypedDict, total=False):
account_id: Required[str]
prompt: Required[str]
frequency_penalty: float
lora: str
max_tokens: int
messages: Iterable[TextGenerationMessage]
prompt: str
presence_penalty: float
raw: bool
repetition_penalty: float
seed: int
stream: bool
temperature: float
class TextGenerationMessage(TypedDict, total=False):
top_k: int
top_p: float
class Variant8(TypedDict, total=False):
account_id: Required[str]
messages: Required[Iterable[Variant8Message]]
frequency_penalty: float
max_tokens: int
presence_penalty: float
repetition_penalty: float
seed: int
stream: bool
temperature: float
tools: Iterable[Variant8Tool]
top_k: int
top_p: float
class Variant8Message(TypedDict, total=False):
content: Required[str]
role: Required[str]
class Variant8ToolFunctionParametersProperties(TypedDict, total=False):
description: str
type: str
class Variant8ToolFunctionParameters(TypedDict, total=False):
properties: Dict[str, Variant8ToolFunctionParametersProperties]
required: List[str]
type: str
class Variant8ToolFunction(TypedDict, total=False):
description: str
name: str
parameters: Variant8ToolFunctionParameters
class Variant8Tool(TypedDict, total=False):
function: Variant8ToolFunction
type: str
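
Since these are TypedDicts, callers pass plain dicts. A hedged sketch of a Variant8-shaped payload (chat messages plus tool calling), using only the keys defined above; the tool name and values are illustrative, not from this diff:

example_variant8_params = {
    "account_id": "023e105f4ecef8ad9ca31a8372d0c353",
    "messages": [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What is the weather in Austin?"},
    ],
    "tools": [
        {
            "type": "function",
            "function": {
                "name": "get_weather",  # hypothetical tool name
                "description": "Look up the current weather for a city",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "city": {"type": "string", "description": "City name"},
                    },
                    "required": ["city"],
                },
            },
        },
    ],
}
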
class Translation(TypedDict, total=False):
account_id: Required[str]
@@ -159,7 +230,8 @@ AIRunParams = Union[
AutomaticSpeechRecognition,
ImageClassification,
ObjectDetection,
TextGeneration,
Variant7,
Variant8,
Translation,
Summarization,
ImageToText,

View file

@@ -418,6 +418,7 @@ class TestAI:
ai = client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
prompt="x",
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -426,25 +427,18 @@ class TestAI:
ai = client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
prompt="x",
frequency_penalty=0,
lora="lora",
max_tokens=0,
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
prompt="x",
presence_penalty=0,
raw=True,
repetition_penalty=0,
seed=1,
stream=True,
temperature=0,
top_k=1,
top_p=0,
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -453,6 +447,7 @@ class TestAI:
response = client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
prompt="x",
)
assert response.is_closed is True
@@ -465,6 +460,7 @@ class TestAI:
with client.workers.ai.with_streaming_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
prompt="x",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -480,12 +476,14 @@ class TestAI:
client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="",
prompt="x",
)
with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"):
client.workers.ai.with_raw_response.run(
model_name="",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
prompt="x",
)
@parametrize
@@ -493,8 +491,20 @@ class TestAI:
ai = client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -503,9 +513,82 @@ class TestAI:
ai = client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
source_lang="source_lang",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
frequency_penalty=0,
max_tokens=0,
presence_penalty=0,
repetition_penalty=0,
seed=1,
stream=True,
temperature=0,
tools=[
{
"function": {
"description": "description",
"name": "name",
"parameters": {
"properties": {
"foo": {
"description": "description",
"type": "type",
}
},
"required": ["string", "string", "string"],
"type": "type",
},
},
"type": "type",
},
{
"function": {
"description": "description",
"name": "name",
"parameters": {
"properties": {
"foo": {
"description": "description",
"type": "type",
}
},
"required": ["string", "string", "string"],
"type": "type",
},
},
"type": "type",
},
{
"function": {
"description": "description",
"name": "name",
"parameters": {
"properties": {
"foo": {
"description": "description",
"type": "type",
}
},
"required": ["string", "string", "string"],
"type": "type",
},
},
"type": "type",
},
],
top_k=1,
top_p=0,
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -514,8 +597,20 @@ class TestAI:
response = client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
)
assert response.is_closed is True
@@ -528,8 +623,20 @@ class TestAI:
with client.workers.ai.with_streaming_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -545,16 +652,40 @@ class TestAI:
client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="",
target_lang="target_lang",
text="x",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
)
with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"):
client.workers.ai.with_raw_response.run(
model_name="",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
)
@parametrize
@@ -562,7 +693,8 @@ class TestAI:
ai = client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
target_lang="target_lang",
text="x",
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -571,8 +703,9 @@ class TestAI:
ai = client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
max_length=0,
target_lang="target_lang",
text="x",
source_lang="source_lang",
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -581,7 +714,8 @@ class TestAI:
response = client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
target_lang="target_lang",
text="x",
)
assert response.is_closed is True
@@ -594,7 +728,8 @@ class TestAI:
with client.workers.ai.with_streaming_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
target_lang="target_lang",
text="x",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -606,6 +741,71 @@ class TestAI:
@parametrize
def test_path_params_run_overload_10(self, client: Cloudflare) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="",
target_lang="target_lang",
text="x",
)
with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"):
client.workers.ai.with_raw_response.run(
model_name="",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
)
@parametrize
def test_method_run_overload_11(self, client: Cloudflare) -> None:
ai = client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
def test_method_run_with_all_params_overload_11(self, client: Cloudflare) -> None:
ai = client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
max_length=0,
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
def test_raw_response_run_overload_11(self, client: Cloudflare) -> None:
response = client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
ai = response.parse()
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
def test_streaming_response_run_overload_11(self, client: Cloudflare) -> None:
with client.workers.ai.with_streaming_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
ai = response.parse()
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
def test_path_params_run_overload_11(self, client: Cloudflare) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
client.workers.ai.with_raw_response.run(
model_name="model_name",
@@ -621,7 +821,7 @@ class TestAI:
)
@parametrize
def test_method_run_overload_11(self, client: Cloudflare) -> None:
def test_method_run_overload_12(self, client: Cloudflare) -> None:
ai = client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
@@ -630,7 +830,7 @@ class TestAI:
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
def test_method_run_with_all_params_overload_11(self, client: Cloudflare) -> None:
def test_method_run_with_all_params_overload_12(self, client: Cloudflare) -> None:
ai = client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
@@ -657,7 +857,7 @@ class TestAI:
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
def test_raw_response_run_overload_11(self, client: Cloudflare) -> None:
def test_raw_response_run_overload_12(self, client: Cloudflare) -> None:
response = client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
@@ -670,7 +870,7 @@ class TestAI:
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
def test_streaming_response_run_overload_11(self, client: Cloudflare) -> None:
def test_streaming_response_run_overload_12(self, client: Cloudflare) -> None:
with client.workers.ai.with_streaming_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
@@ -685,7 +885,7 @@ class TestAI:
assert cast(Any, response.is_closed) is True
@parametrize
def test_path_params_run_overload_11(self, client: Cloudflare) -> None:
def test_path_params_run_overload_12(self, client: Cloudflare) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
client.workers.ai.with_raw_response.run(
model_name="model_name",
@@ -1105,6 +1305,7 @@ class TestAsyncAI:
ai = await async_client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
prompt="x",
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -1113,25 +1314,18 @@ class TestAsyncAI:
ai = await async_client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
prompt="x",
frequency_penalty=0,
lora="lora",
max_tokens=0,
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
prompt="x",
presence_penalty=0,
raw=True,
repetition_penalty=0,
seed=1,
stream=True,
temperature=0,
top_k=1,
top_p=0,
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -1140,6 +1334,7 @@ class TestAsyncAI:
response = await async_client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
prompt="x",
)
assert response.is_closed is True
@@ -1152,6 +1347,7 @@ class TestAsyncAI:
async with async_client.workers.ai.with_streaming_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
prompt="x",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -1167,12 +1363,14 @@ class TestAsyncAI:
await async_client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="",
prompt="x",
)
with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"):
await async_client.workers.ai.with_raw_response.run(
model_name="",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
prompt="x",
)
@parametrize
@@ -1180,8 +1378,20 @@ class TestAsyncAI:
ai = await async_client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -1190,9 +1400,82 @@ class TestAsyncAI:
ai = await async_client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
source_lang="source_lang",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
frequency_penalty=0,
max_tokens=0,
presence_penalty=0,
repetition_penalty=0,
seed=1,
stream=True,
temperature=0,
tools=[
{
"function": {
"description": "description",
"name": "name",
"parameters": {
"properties": {
"foo": {
"description": "description",
"type": "type",
}
},
"required": ["string", "string", "string"],
"type": "type",
},
},
"type": "type",
},
{
"function": {
"description": "description",
"name": "name",
"parameters": {
"properties": {
"foo": {
"description": "description",
"type": "type",
}
},
"required": ["string", "string", "string"],
"type": "type",
},
},
"type": "type",
},
{
"function": {
"description": "description",
"name": "name",
"parameters": {
"properties": {
"foo": {
"description": "description",
"type": "type",
}
},
"required": ["string", "string", "string"],
"type": "type",
},
},
"type": "type",
},
],
top_k=1,
top_p=0,
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -1201,8 +1484,20 @@ class TestAsyncAI:
response = await async_client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
)
assert response.is_closed is True
@@ -1215,8 +1510,20 @@ class TestAsyncAI:
async with async_client.workers.ai.with_streaming_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -1232,16 +1539,40 @@ class TestAsyncAI:
await async_client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="",
target_lang="target_lang",
text="x",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
)
with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"):
await async_client.workers.ai.with_raw_response.run(
model_name="",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
messages=[
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
{
"content": "content",
"role": "role",
},
],
)
@parametrize
@@ -1249,7 +1580,8 @@ class TestAsyncAI:
ai = await async_client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
target_lang="target_lang",
text="x",
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -1258,8 +1590,9 @@ class TestAsyncAI:
ai = await async_client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
max_length=0,
target_lang="target_lang",
text="x",
source_lang="source_lang",
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@@ -1268,7 +1601,8 @@ class TestAsyncAI:
response = await async_client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
target_lang="target_lang",
text="x",
)
assert response.is_closed is True
@@ -1281,7 +1615,8 @@ class TestAsyncAI:
async with async_client.workers.ai.with_streaming_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
target_lang="target_lang",
text="x",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -1293,6 +1628,71 @@ class TestAsyncAI:
@parametrize
async def test_path_params_run_overload_10(self, async_client: AsyncCloudflare) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
await async_client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="",
target_lang="target_lang",
text="x",
)
with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"):
await async_client.workers.ai.with_raw_response.run(
model_name="",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
target_lang="target_lang",
text="x",
)
@parametrize
async def test_method_run_overload_11(self, async_client: AsyncCloudflare) -> None:
ai = await async_client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
async def test_method_run_with_all_params_overload_11(self, async_client: AsyncCloudflare) -> None:
ai = await async_client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
max_length=0,
)
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
async def test_raw_response_run_overload_11(self, async_client: AsyncCloudflare) -> None:
response = await async_client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
ai = await response.parse()
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
async def test_streaming_response_run_overload_11(self, async_client: AsyncCloudflare) -> None:
async with async_client.workers.ai.with_streaming_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
input_text="x",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
ai = await response.parse()
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
async def test_path_params_run_overload_11(self, async_client: AsyncCloudflare) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
await async_client.workers.ai.with_raw_response.run(
model_name="model_name",
@@ -1308,7 +1708,7 @@ class TestAsyncAI:
)
@parametrize
async def test_method_run_overload_11(self, async_client: AsyncCloudflare) -> None:
async def test_method_run_overload_12(self, async_client: AsyncCloudflare) -> None:
ai = await async_client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
@@ -1317,7 +1717,7 @@ class TestAsyncAI:
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
async def test_method_run_with_all_params_overload_11(self, async_client: AsyncCloudflare) -> None:
async def test_method_run_with_all_params_overload_12(self, async_client: AsyncCloudflare) -> None:
ai = await async_client.workers.ai.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
@@ -1344,7 +1744,7 @@ class TestAsyncAI:
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
async def test_raw_response_run_overload_11(self, async_client: AsyncCloudflare) -> None:
async def test_raw_response_run_overload_12(self, async_client: AsyncCloudflare) -> None:
response = await async_client.workers.ai.with_raw_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
@@ -1357,7 +1757,7 @@ class TestAsyncAI:
assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
@parametrize
async def test_streaming_response_run_overload_11(self, async_client: AsyncCloudflare) -> None:
async def test_streaming_response_run_overload_12(self, async_client: AsyncCloudflare) -> None:
async with async_client.workers.ai.with_streaming_response.run(
model_name="model_name",
account_id="023e105f4ecef8ad9ca31a8372d0c353",
@@ -1372,7 +1772,7 @@ class TestAsyncAI:
assert cast(Any, response.is_closed) is True
@parametrize
async def test_path_params_run_overload_11(self, async_client: AsyncCloudflare) -> None:
async def test_path_params_run_overload_12(self, async_client: AsyncCloudflare) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
await async_client.workers.ai.with_raw_response.run(
model_name="model_name",