feat(api): RAG-395: add AI Search endpoints
Some checks failed
CI / lint (push) Has been cancelled
CI / build (push) Has been cancelled
CI / test (push) Has been cancelled
CI / examples (push) Has been cancelled

* feat(api): RAG-395: fix ai search casing

* feat(api): RAG-395: re-enable search and chat completions endpoints for ai search

* feat(api): RAG-395: add more AI Search endpoints

* feat(api): RAG-395: add remaining ai search token endpoints and temporarily disable chat completions and search

* feat(api): RAG-395: add AI Search endpoints
This commit is contained in:
stainless-app[bot] 2025-12-12 17:51:35 +00:00
parent 2f1981ab77
commit 43a4557509
43 changed files with 7258 additions and 2 deletions

View file

@ -1,4 +1,4 @@
configured_endpoints: 1925
configured_endpoints: 1940
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cloudflare%2Fcloudflare-5fc91fe703941755eabe8e53f6d53056d31c38bff2f098dfa2389512e52b586f.yml
openapi_spec_hash: 7d4707f46e5b07408d6a083bfe164f51
config_hash: 93c66810e920a180099213f0e36aacbd
config_hash: 3e9cdaaf8fbff19639d218f89b85a116

81
api.md
View file

@ -10302,6 +10302,87 @@ Methods:
- <code title="get /accounts/{account_id}/ai/tomarkdown/supported">client.ai.to_markdown.<a href="./src/cloudflare/resources/ai/to_markdown.py">supported</a>(\*, account_id) -> <a href="./src/cloudflare/types/ai/to_markdown_supported_response.py">SyncSinglePage[ToMarkdownSupportedResponse]</a></code>
- <code title="post /accounts/{account_id}/ai/tomarkdown">client.ai.to_markdown.<a href="./src/cloudflare/resources/ai/to_markdown.py">transform</a>(file, \*, account_id, \*\*<a href="src/cloudflare/types/ai/to_markdown_transform_params.py">params</a>) -> <a href="./src/cloudflare/types/ai/to_markdown_transform_response.py">SyncSinglePage[ToMarkdownTransformResponse]</a></code>
# AISearch
## Instances
Types:
```python
from cloudflare.types.aisearch import (
InstanceCreateResponse,
InstanceUpdateResponse,
InstanceListResponse,
InstanceDeleteResponse,
InstanceReadResponse,
InstanceStatsResponse,
)
```
Methods:
- <code title="post /accounts/{account_id}/ai-search/instances">client.aisearch.instances.<a href="./src/cloudflare/resources/aisearch/instances/instances.py">create</a>(\*, account_id, \*\*<a href="src/cloudflare/types/aisearch/instance_create_params.py">params</a>) -> <a href="./src/cloudflare/types/aisearch/instance_create_response.py">InstanceCreateResponse</a></code>
- <code title="put /accounts/{account_id}/ai-search/instances/{id}">client.aisearch.instances.<a href="./src/cloudflare/resources/aisearch/instances/instances.py">update</a>(id, \*, account_id, \*\*<a href="src/cloudflare/types/aisearch/instance_update_params.py">params</a>) -> <a href="./src/cloudflare/types/aisearch/instance_update_response.py">InstanceUpdateResponse</a></code>
- <code title="get /accounts/{account_id}/ai-search/instances">client.aisearch.instances.<a href="./src/cloudflare/resources/aisearch/instances/instances.py">list</a>(\*, account_id, \*\*<a href="src/cloudflare/types/aisearch/instance_list_params.py">params</a>) -> <a href="./src/cloudflare/types/aisearch/instance_list_response.py">SyncV4PagePaginationArray[InstanceListResponse]</a></code>
- <code title="delete /accounts/{account_id}/ai-search/instances/{id}">client.aisearch.instances.<a href="./src/cloudflare/resources/aisearch/instances/instances.py">delete</a>(id, \*, account_id) -> <a href="./src/cloudflare/types/aisearch/instance_delete_response.py">InstanceDeleteResponse</a></code>
- <code title="get /accounts/{account_id}/ai-search/instances/{id}">client.aisearch.instances.<a href="./src/cloudflare/resources/aisearch/instances/instances.py">read</a>(id, \*, account_id) -> <a href="./src/cloudflare/types/aisearch/instance_read_response.py">InstanceReadResponse</a></code>
- <code title="get /accounts/{account_id}/ai-search/instances/{id}/stats">client.aisearch.instances.<a href="./src/cloudflare/resources/aisearch/instances/instances.py">stats</a>(id, \*, account_id) -> <a href="./src/cloudflare/types/aisearch/instance_stats_response.py">InstanceStatsResponse</a></code>
### Items
Types:
```python
from cloudflare.types.aisearch.instances import ItemListResponse, ItemGetResponse
```
Methods:
- <code title="get /accounts/{account_id}/ai-search/instances/{id}/items">client.aisearch.instances.items.<a href="./src/cloudflare/resources/aisearch/instances/items.py">list</a>(id, \*, account_id, \*\*<a href="src/cloudflare/types/aisearch/instances/item_list_params.py">params</a>) -> <a href="./src/cloudflare/types/aisearch/instances/item_list_response.py">SyncV4PagePaginationArray[ItemListResponse]</a></code>
- <code title="get /accounts/{account_id}/ai-search/instances/{id}/items/{item_id}">client.aisearch.instances.items.<a href="./src/cloudflare/resources/aisearch/instances/items.py">get</a>(item_id, \*, account_id, id) -> <a href="./src/cloudflare/types/aisearch/instances/item_get_response.py">ItemGetResponse</a></code>
### Jobs
Types:
```python
from cloudflare.types.aisearch.instances import (
JobCreateResponse,
JobListResponse,
JobGetResponse,
JobLogsResponse,
)
```
Methods:
- <code title="post /accounts/{account_id}/ai-search/instances/{id}/jobs">client.aisearch.instances.jobs.<a href="./src/cloudflare/resources/aisearch/instances/jobs.py">create</a>(id, \*, account_id) -> <a href="./src/cloudflare/types/aisearch/instances/job_create_response.py">JobCreateResponse</a></code>
- <code title="get /accounts/{account_id}/ai-search/instances/{id}/jobs">client.aisearch.instances.jobs.<a href="./src/cloudflare/resources/aisearch/instances/jobs.py">list</a>(id, \*, account_id, \*\*<a href="src/cloudflare/types/aisearch/instances/job_list_params.py">params</a>) -> <a href="./src/cloudflare/types/aisearch/instances/job_list_response.py">SyncV4PagePaginationArray[JobListResponse]</a></code>
- <code title="get /accounts/{account_id}/ai-search/instances/{id}/jobs/{job_id}">client.aisearch.instances.jobs.<a href="./src/cloudflare/resources/aisearch/instances/jobs.py">get</a>(job_id, \*, account_id, id) -> <a href="./src/cloudflare/types/aisearch/instances/job_get_response.py">JobGetResponse</a></code>
- <code title="get /accounts/{account_id}/ai-search/instances/{id}/jobs/{job_id}/logs">client.aisearch.instances.jobs.<a href="./src/cloudflare/resources/aisearch/instances/jobs.py">logs</a>(job_id, \*, account_id, id, \*\*<a href="src/cloudflare/types/aisearch/instances/job_logs_params.py">params</a>) -> <a href="./src/cloudflare/types/aisearch/instances/job_logs_response.py">JobLogsResponse</a></code>
## Tokens
Types:
```python
from cloudflare.types.aisearch import (
TokenCreateResponse,
TokenUpdateResponse,
TokenListResponse,
TokenDeleteResponse,
TokenReadResponse,
)
```
Methods:
- <code title="post /accounts/{account_id}/ai-search/tokens">client.aisearch.tokens.<a href="./src/cloudflare/resources/aisearch/tokens.py">create</a>(\*, account_id, \*\*<a href="src/cloudflare/types/aisearch/token_create_params.py">params</a>) -> <a href="./src/cloudflare/types/aisearch/token_create_response.py">TokenCreateResponse</a></code>
- <code title="put /accounts/{account_id}/ai-search/tokens/{id}">client.aisearch.tokens.<a href="./src/cloudflare/resources/aisearch/tokens.py">update</a>(id, \*, account_id) -> <a href="./src/cloudflare/types/aisearch/token_update_response.py">TokenUpdateResponse</a></code>
- <code title="get /accounts/{account_id}/ai-search/tokens">client.aisearch.tokens.<a href="./src/cloudflare/resources/aisearch/tokens.py">list</a>(\*, account_id, \*\*<a href="src/cloudflare/types/aisearch/token_list_params.py">params</a>) -> <a href="./src/cloudflare/types/aisearch/token_list_response.py">SyncV4PagePaginationArray[TokenListResponse]</a></code>
- <code title="delete /accounts/{account_id}/ai-search/tokens/{id}">client.aisearch.tokens.<a href="./src/cloudflare/resources/aisearch/tokens.py">delete</a>(id, \*, account_id) -> <a href="./src/cloudflare/types/aisearch/token_delete_response.py">TokenDeleteResponse</a></code>
- <code title="get /accounts/{account_id}/ai-search/tokens/{id}">client.aisearch.tokens.<a href="./src/cloudflare/resources/aisearch/tokens.py">read</a>(id, \*, account_id) -> <a href="./src/cloudflare/types/aisearch/token_read_response.py">TokenReadResponse</a></code>
# SecurityCenter
## Insights

View file

@ -65,6 +65,7 @@ if TYPE_CHECKING:
logpush,
workers,
accounts,
aisearch,
alerting,
firewall,
rulesets,
@ -173,6 +174,7 @@ if TYPE_CHECKING:
from .resources.logpush.logpush import LogpushResource, AsyncLogpushResource
from .resources.workers.workers import WorkersResource, AsyncWorkersResource
from .resources.accounts.accounts import AccountsResource, AsyncAccountsResource
from .resources.aisearch.aisearch import AISearchResource, AsyncAISearchResource
from .resources.alerting.alerting import AlertingResource, AsyncAlertingResource
from .resources.firewall.firewall import FirewallResource, AsyncFirewallResource
from .resources.rulesets.rulesets import RulesetsResource, AsyncRulesetsResource
@ -903,6 +905,12 @@ class Cloudflare(SyncAPIClient):
return AIResource(self)
@cached_property
def aisearch(self) -> AISearchResource:
    """Lazily build (and cache) the AI Search resource bound to this client."""
    # Deferred import keeps client import time independent of this resource module.
    from .resources.aisearch import AISearchResource as _AISearchResource

    return _AISearchResource(self)
@cached_property
def security_center(self) -> SecurityCenterResource:
from .resources.security_center import SecurityCenterResource
@ -1759,6 +1767,12 @@ class AsyncCloudflare(AsyncAPIClient):
return AsyncAIResource(self)
@cached_property
def aisearch(self) -> AsyncAISearchResource:
    """Lazily build (and cache) the async AI Search resource bound to this client."""
    # Deferred import keeps client import time independent of this resource module.
    from .resources.aisearch import AsyncAISearchResource as _AsyncAISearchResource

    return _AsyncAISearchResource(self)
@cached_property
def security_center(self) -> AsyncSecurityCenterResource:
from .resources.security_center import AsyncSecurityCenterResource
@ -2545,6 +2559,12 @@ class CloudflareWithRawResponse:
return AIResourceWithRawResponse(self._client.ai)
@cached_property
def aisearch(self) -> aisearch.AISearchResourceWithRawResponse:
    # Deferred import: the resource module is loaded only on first attribute access.
    from .resources.aisearch import AISearchResourceWithRawResponse

    return AISearchResourceWithRawResponse(self._client.aisearch)
@cached_property
def security_center(self) -> security_center.SecurityCenterResourceWithRawResponse:
from .resources.security_center import SecurityCenterResourceWithRawResponse
@ -3148,6 +3168,12 @@ class AsyncCloudflareWithRawResponse:
return AsyncAIResourceWithRawResponse(self._client.ai)
@cached_property
def aisearch(self) -> aisearch.AsyncAISearchResourceWithRawResponse:
    # Deferred import: the resource module is loaded only on first attribute access.
    from .resources.aisearch import AsyncAISearchResourceWithRawResponse

    return AsyncAISearchResourceWithRawResponse(self._client.aisearch)
@cached_property
def security_center(self) -> security_center.AsyncSecurityCenterResourceWithRawResponse:
from .resources.security_center import AsyncSecurityCenterResourceWithRawResponse
@ -3751,6 +3777,12 @@ class CloudflareWithStreamedResponse:
return AIResourceWithStreamingResponse(self._client.ai)
@cached_property
def aisearch(self) -> aisearch.AISearchResourceWithStreamingResponse:
    # Deferred import: the resource module is loaded only on first attribute access.
    from .resources.aisearch import AISearchResourceWithStreamingResponse

    return AISearchResourceWithStreamingResponse(self._client.aisearch)
@cached_property
def security_center(self) -> security_center.SecurityCenterResourceWithStreamingResponse:
from .resources.security_center import SecurityCenterResourceWithStreamingResponse
@ -4364,6 +4396,12 @@ class AsyncCloudflareWithStreamedResponse:
return AsyncAIResourceWithStreamingResponse(self._client.ai)
@cached_property
def aisearch(self) -> aisearch.AsyncAISearchResourceWithStreamingResponse:
    # Deferred import: the resource module is loaded only on first attribute access.
    from .resources.aisearch import AsyncAISearchResourceWithStreamingResponse

    return AsyncAISearchResourceWithStreamingResponse(self._client.aisearch)
@cached_property
def security_center(self) -> security_center.AsyncSecurityCenterResourceWithStreamingResponse:
from .resources.security_center import AsyncSecurityCenterResourceWithStreamingResponse

View file

@ -0,0 +1,47 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .tokens import (
TokensResource,
AsyncTokensResource,
TokensResourceWithRawResponse,
AsyncTokensResourceWithRawResponse,
TokensResourceWithStreamingResponse,
AsyncTokensResourceWithStreamingResponse,
)
from .aisearch import (
AISearchResource,
AsyncAISearchResource,
AISearchResourceWithRawResponse,
AsyncAISearchResourceWithRawResponse,
AISearchResourceWithStreamingResponse,
AsyncAISearchResourceWithStreamingResponse,
)
from .instances import (
InstancesResource,
AsyncInstancesResource,
InstancesResourceWithRawResponse,
AsyncInstancesResourceWithRawResponse,
InstancesResourceWithStreamingResponse,
AsyncInstancesResourceWithStreamingResponse,
)
# Public surface of the `aisearch` resource package (re-exported from submodules).
__all__ = [
    "InstancesResource",
    "AsyncInstancesResource",
    "InstancesResourceWithRawResponse",
    "AsyncInstancesResourceWithRawResponse",
    "InstancesResourceWithStreamingResponse",
    "AsyncInstancesResourceWithStreamingResponse",
    "TokensResource",
    "AsyncTokensResource",
    "TokensResourceWithRawResponse",
    "AsyncTokensResourceWithRawResponse",
    "TokensResourceWithStreamingResponse",
    "AsyncTokensResourceWithStreamingResponse",
    "AISearchResource",
    "AsyncAISearchResource",
    "AISearchResourceWithRawResponse",
    "AsyncAISearchResourceWithRawResponse",
    "AISearchResourceWithStreamingResponse",
    "AsyncAISearchResourceWithStreamingResponse",
]

View file

@ -0,0 +1,134 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from .tokens import (
TokensResource,
AsyncTokensResource,
TokensResourceWithRawResponse,
AsyncTokensResourceWithRawResponse,
TokensResourceWithStreamingResponse,
AsyncTokensResourceWithStreamingResponse,
)
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from .instances.instances import (
InstancesResource,
AsyncInstancesResource,
InstancesResourceWithRawResponse,
AsyncInstancesResourceWithRawResponse,
InstancesResourceWithStreamingResponse,
AsyncInstancesResourceWithStreamingResponse,
)
__all__ = ["AISearchResource", "AsyncAISearchResource"]
class AISearchResource(SyncAPIResource):
    """Synchronous entry point for the AI Search API.

    Groups the ``instances`` and ``tokens`` sub-resources; each is built
    lazily on first access and cached via ``cached_property``.
    """

    @cached_property
    def instances(self) -> InstancesResource:
        return InstancesResource(self._client)

    @cached_property
    def tokens(self) -> TokensResource:
        return TokensResource(self._client)

    @cached_property
    def with_raw_response(self) -> AISearchResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#accessing-raw-response-data-eg-headers
        """
        return AISearchResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AISearchResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#with_streaming_response
        """
        return AISearchResourceWithStreamingResponse(self)
class AsyncAISearchResource(AsyncAPIResource):
    """Asynchronous entry point for the AI Search API.

    Mirrors :class:`AISearchResource` with async sub-resources; each is built
    lazily on first access and cached via ``cached_property``.
    """

    @cached_property
    def instances(self) -> AsyncInstancesResource:
        return AsyncInstancesResource(self._client)

    @cached_property
    def tokens(self) -> AsyncTokensResource:
        return AsyncTokensResource(self._client)

    @cached_property
    def with_raw_response(self) -> AsyncAISearchResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#accessing-raw-response-data-eg-headers
        """
        return AsyncAISearchResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncAISearchResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#with_streaming_response
        """
        return AsyncAISearchResourceWithStreamingResponse(self)
class AISearchResourceWithRawResponse:
    """View of :class:`AISearchResource` whose sub-resources return raw responses."""

    def __init__(self, aisearch: AISearchResource) -> None:
        self._aisearch = aisearch

    @cached_property
    def instances(self) -> InstancesResourceWithRawResponse:
        return InstancesResourceWithRawResponse(self._aisearch.instances)

    @cached_property
    def tokens(self) -> TokensResourceWithRawResponse:
        return TokensResourceWithRawResponse(self._aisearch.tokens)


class AsyncAISearchResourceWithRawResponse:
    """View of :class:`AsyncAISearchResource` whose sub-resources return raw responses."""

    def __init__(self, aisearch: AsyncAISearchResource) -> None:
        self._aisearch = aisearch

    @cached_property
    def instances(self) -> AsyncInstancesResourceWithRawResponse:
        return AsyncInstancesResourceWithRawResponse(self._aisearch.instances)

    @cached_property
    def tokens(self) -> AsyncTokensResourceWithRawResponse:
        return AsyncTokensResourceWithRawResponse(self._aisearch.tokens)


class AISearchResourceWithStreamingResponse:
    """View of :class:`AISearchResource` whose sub-resources stream response bodies."""

    def __init__(self, aisearch: AISearchResource) -> None:
        self._aisearch = aisearch

    @cached_property
    def instances(self) -> InstancesResourceWithStreamingResponse:
        return InstancesResourceWithStreamingResponse(self._aisearch.instances)

    @cached_property
    def tokens(self) -> TokensResourceWithStreamingResponse:
        return TokensResourceWithStreamingResponse(self._aisearch.tokens)


class AsyncAISearchResourceWithStreamingResponse:
    """View of :class:`AsyncAISearchResource` whose sub-resources stream response bodies."""

    def __init__(self, aisearch: AsyncAISearchResource) -> None:
        self._aisearch = aisearch

    @cached_property
    def instances(self) -> AsyncInstancesResourceWithStreamingResponse:
        return AsyncInstancesResourceWithStreamingResponse(self._aisearch.instances)

    @cached_property
    def tokens(self) -> AsyncTokensResourceWithStreamingResponse:
        return AsyncTokensResourceWithStreamingResponse(self._aisearch.tokens)

View file

@ -0,0 +1,47 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .jobs import (
JobsResource,
AsyncJobsResource,
JobsResourceWithRawResponse,
AsyncJobsResourceWithRawResponse,
JobsResourceWithStreamingResponse,
AsyncJobsResourceWithStreamingResponse,
)
from .items import (
ItemsResource,
AsyncItemsResource,
ItemsResourceWithRawResponse,
AsyncItemsResourceWithRawResponse,
ItemsResourceWithStreamingResponse,
AsyncItemsResourceWithStreamingResponse,
)
from .instances import (
InstancesResource,
AsyncInstancesResource,
InstancesResourceWithRawResponse,
AsyncInstancesResourceWithRawResponse,
InstancesResourceWithStreamingResponse,
AsyncInstancesResourceWithStreamingResponse,
)
# Public surface of the `aisearch.instances` resource package (re-exported from submodules).
__all__ = [
    "ItemsResource",
    "AsyncItemsResource",
    "ItemsResourceWithRawResponse",
    "AsyncItemsResourceWithRawResponse",
    "ItemsResourceWithStreamingResponse",
    "AsyncItemsResourceWithStreamingResponse",
    "JobsResource",
    "AsyncJobsResource",
    "JobsResourceWithRawResponse",
    "AsyncJobsResourceWithRawResponse",
    "JobsResourceWithStreamingResponse",
    "AsyncJobsResourceWithStreamingResponse",
    "InstancesResource",
    "AsyncInstancesResource",
    "InstancesResourceWithRawResponse",
    "AsyncInstancesResourceWithRawResponse",
    "InstancesResourceWithStreamingResponse",
    "AsyncInstancesResourceWithStreamingResponse",
]

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,317 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Type, cast
from typing_extensions import Literal
import httpx
from ...._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
from ...._utils import maybe_transform
from ...._compat import cached_property
from ...._resource import SyncAPIResource, AsyncAPIResource
from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
from ...._wrappers import ResultWrapper
from ....pagination import SyncV4PagePaginationArray, AsyncV4PagePaginationArray
from ...._base_client import AsyncPaginator, make_request_options
from ....types.aisearch.instances import item_list_params
from ....types.aisearch.instances.item_get_response import ItemGetResponse
from ....types.aisearch.instances.item_list_response import ItemListResponse
__all__ = ["ItemsResource", "AsyncItemsResource"]
class ItemsResource(SyncAPIResource):
    """Synchronous access to the items of an AI Search instance (list / get)."""

    @cached_property
    def with_raw_response(self) -> ItemsResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#accessing-raw-response-data-eg-headers
        """
        return ItemsResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> ItemsResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#with_streaming_response
        """
        return ItemsResourceWithStreamingResponse(self)

    def list(
        self,
        id: str,
        *,
        account_id: str,
        page: int | Omit = omit,
        per_page: int | Omit = omit,
        search: str | Omit = omit,
        status: Literal["queued", "running", "completed", "error", "skipped"] | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> SyncV4PagePaginationArray[ItemListResponse]:
        """
        Items List.

        Args:
          id: Use your AI Search ID.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        return self._get_api_list(
            f"/accounts/{account_id}/ai-search/instances/{id}/items",
            page=SyncV4PagePaginationArray[ItemListResponse],
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                # Omitted filters are dropped by maybe_transform before the query is sent.
                query=maybe_transform(
                    {
                        "page": page,
                        "per_page": per_page,
                        "search": search,
                        "status": status,
                    },
                    item_list_params.ItemListParams,
                ),
            ),
            model=ItemListResponse,
        )

    def get(
        self,
        item_id: str,
        *,
        account_id: str,
        id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> ItemGetResponse:
        """
        Get Items.

        Args:
          id: Use your AI Search ID.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        if not item_id:
            raise ValueError(f"Expected a non-empty value for `item_id` but received {item_id!r}")
        return self._get(
            f"/accounts/{account_id}/ai-search/instances/{id}/items/{item_id}",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                # Unwrap the API's result envelope before casting to the response model.
                post_parser=ResultWrapper[ItemGetResponse]._unwrapper,
            ),
            cast_to=cast(Type[ItemGetResponse], ResultWrapper[ItemGetResponse]),
        )
class AsyncItemsResource(AsyncAPIResource):
    """Asynchronous access to the items of an AI Search instance (list / get)."""

    @cached_property
    def with_raw_response(self) -> AsyncItemsResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#accessing-raw-response-data-eg-headers
        """
        return AsyncItemsResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncItemsResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#with_streaming_response
        """
        return AsyncItemsResourceWithStreamingResponse(self)

    # NOTE: deliberately not `async def` — it returns an AsyncPaginator that the
    # caller awaits or async-iterates.
    def list(
        self,
        id: str,
        *,
        account_id: str,
        page: int | Omit = omit,
        per_page: int | Omit = omit,
        search: str | Omit = omit,
        status: Literal["queued", "running", "completed", "error", "skipped"] | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> AsyncPaginator[ItemListResponse, AsyncV4PagePaginationArray[ItemListResponse]]:
        """
        Items List.

        Args:
          id: Use your AI Search ID.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        return self._get_api_list(
            f"/accounts/{account_id}/ai-search/instances/{id}/items",
            page=AsyncV4PagePaginationArray[ItemListResponse],
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                # Omitted filters are dropped by maybe_transform before the query is sent.
                query=maybe_transform(
                    {
                        "page": page,
                        "per_page": per_page,
                        "search": search,
                        "status": status,
                    },
                    item_list_params.ItemListParams,
                ),
            ),
            model=ItemListResponse,
        )

    async def get(
        self,
        item_id: str,
        *,
        account_id: str,
        id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> ItemGetResponse:
        """
        Get Items.

        Args:
          id: Use your AI Search ID.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        if not item_id:
            raise ValueError(f"Expected a non-empty value for `item_id` but received {item_id!r}")
        return await self._get(
            f"/accounts/{account_id}/ai-search/instances/{id}/items/{item_id}",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                # Unwrap the API's result envelope before casting to the response model.
                post_parser=ResultWrapper[ItemGetResponse]._unwrapper,
            ),
            cast_to=cast(Type[ItemGetResponse], ResultWrapper[ItemGetResponse]),
        )
class ItemsResourceWithRawResponse:
    """Raw-response view over :class:`ItemsResource` methods."""

    def __init__(self, items: ItemsResource) -> None:
        self._items = items

        self.list = to_raw_response_wrapper(
            items.list,
        )
        self.get = to_raw_response_wrapper(
            items.get,
        )


class AsyncItemsResourceWithRawResponse:
    """Raw-response view over :class:`AsyncItemsResource` methods."""

    def __init__(self, items: AsyncItemsResource) -> None:
        self._items = items

        self.list = async_to_raw_response_wrapper(
            items.list,
        )
        self.get = async_to_raw_response_wrapper(
            items.get,
        )


class ItemsResourceWithStreamingResponse:
    """Streaming-response view over :class:`ItemsResource` methods."""

    def __init__(self, items: ItemsResource) -> None:
        self._items = items

        self.list = to_streamed_response_wrapper(
            items.list,
        )
        self.get = to_streamed_response_wrapper(
            items.get,
        )


class AsyncItemsResourceWithStreamingResponse:
    """Streaming-response view over :class:`AsyncItemsResource` methods."""

    def __init__(self, items: AsyncItemsResource) -> None:
        self._items = items

        self.list = async_to_streamed_response_wrapper(
            items.list,
        )
        self.get = async_to_streamed_response_wrapper(
            items.get,
        )

View file

@ -0,0 +1,526 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Type, cast
import httpx
from ...._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
from ...._utils import maybe_transform, async_maybe_transform
from ...._compat import cached_property
from ...._resource import SyncAPIResource, AsyncAPIResource
from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
from ...._wrappers import ResultWrapper
from ....pagination import SyncV4PagePaginationArray, AsyncV4PagePaginationArray
from ...._base_client import AsyncPaginator, make_request_options
from ....types.aisearch.instances import job_list_params, job_logs_params
from ....types.aisearch.instances.job_get_response import JobGetResponse
from ....types.aisearch.instances.job_list_response import JobListResponse
from ....types.aisearch.instances.job_logs_response import JobLogsResponse
from ....types.aisearch.instances.job_create_response import JobCreateResponse
__all__ = ["JobsResource", "AsyncJobsResource"]
class JobsResource(SyncAPIResource):
@cached_property
def with_raw_response(self) -> JobsResourceWithRawResponse:
    """
    This property can be used as a prefix for any HTTP method call to return
    the raw response object instead of the parsed content.

    For more information, see https://www.github.com/cloudflare/cloudflare-python#accessing-raw-response-data-eg-headers
    """
    return JobsResourceWithRawResponse(self)

@cached_property
def with_streaming_response(self) -> JobsResourceWithStreamingResponse:
    """
    An alternative to `.with_raw_response` that doesn't eagerly read the response body.

    For more information, see https://www.github.com/cloudflare/cloudflare-python#with_streaming_response
    """
    return JobsResourceWithStreamingResponse(self)
def create(
    self,
    id: str,
    *,
    account_id: str,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> JobCreateResponse:
    """
    Create new job

    Args:
      id: Use your AI Search ID.

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    if not account_id:
        raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
    if not id:
        raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
    return self._post(
        f"/accounts/{account_id}/ai-search/instances/{id}/jobs",
        options=make_request_options(
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
            # Unwrap the API's result envelope before casting to the response model.
            post_parser=ResultWrapper[JobCreateResponse]._unwrapper,
        ),
        cast_to=cast(Type[JobCreateResponse], ResultWrapper[JobCreateResponse]),
    )
def list(
self,
id: str,
*,
account_id: str,
page: int | Omit = omit,
per_page: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SyncV4PagePaginationArray[JobListResponse]:
"""
List Jobs
Args:
id: Use your AI Search ID.
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
extra_body: Add additional JSON properties to the request
timeout: Override the client-level default timeout for this request, in seconds
"""
if not account_id:
raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
if not id:
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
return self._get_api_list(
f"/accounts/{account_id}/ai-search/instances/{id}/jobs",
page=SyncV4PagePaginationArray[JobListResponse],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
query=maybe_transform(
{
"page": page,
"per_page": per_page,
},
job_list_params.JobListParams,
),
),
model=JobListResponse,
)
def get(
self,
job_id: str,
*,
account_id: str,
id: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> JobGetResponse:
"""
Get a Job Details
Args:
id: Use your AI Search ID.
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
extra_body: Add additional JSON properties to the request
timeout: Override the client-level default timeout for this request, in seconds
"""
if not account_id:
raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
if not id:
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
if not job_id:
raise ValueError(f"Expected a non-empty value for `job_id` but received {job_id!r}")
return self._get(
f"/accounts/{account_id}/ai-search/instances/{id}/jobs/{job_id}",
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
post_parser=ResultWrapper[JobGetResponse]._unwrapper,
),
cast_to=cast(Type[JobGetResponse], ResultWrapper[JobGetResponse]),
)
def logs(
self,
job_id: str,
*,
account_id: str,
id: str,
page: int | Omit = omit,
per_page: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> JobLogsResponse:
"""
List Job Logs
Args:
id: Use your AI Search ID.
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
extra_body: Add additional JSON properties to the request
timeout: Override the client-level default timeout for this request, in seconds
"""
if not account_id:
raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
if not id:
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
if not job_id:
raise ValueError(f"Expected a non-empty value for `job_id` but received {job_id!r}")
return self._get(
f"/accounts/{account_id}/ai-search/instances/{id}/jobs/{job_id}/logs",
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
query=maybe_transform(
{
"page": page,
"per_page": per_page,
},
job_logs_params.JobLogsParams,
),
post_parser=ResultWrapper[JobLogsResponse]._unwrapper,
),
cast_to=cast(Type[JobLogsResponse], ResultWrapper[JobLogsResponse]),
)
class AsyncJobsResource(AsyncAPIResource):
    """Asynchronous client for AI Search indexing-job endpoints.

    Async counterpart of :class:`JobsResource`; same routes under
    ``/accounts/{account_id}/ai-search/instances/{id}/jobs``.
    """

    @cached_property
    def with_raw_response(self) -> AsyncJobsResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#accessing-raw-response-data-eg-headers
        """
        return AsyncJobsResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncJobsResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#with_streaming_response
        """
        return AsyncJobsResourceWithStreamingResponse(self)

    async def create(
        self,
        id: str,
        *,
        account_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> JobCreateResponse:
        """
        Create new job

        Args:
          id: Use your AI Search ID.

          account_id: Account identifier used in the request path.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        return await self._post(
            f"/accounts/{account_id}/ai-search/instances/{id}/jobs",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                # The API envelopes the payload; unwrap to the bare result object.
                post_parser=ResultWrapper[JobCreateResponse]._unwrapper,
            ),
            cast_to=cast(Type[JobCreateResponse], ResultWrapper[JobCreateResponse]),
        )

    def list(
        self,
        id: str,
        *,
        account_id: str,
        page: int | Omit = omit,
        per_page: int | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> AsyncPaginator[JobListResponse, AsyncV4PagePaginationArray[JobListResponse]]:
        """
        List Jobs

        Args:
          id: Use your AI Search ID.

          account_id: Account identifier used in the request path.

          page: Page number of results to fetch.

          per_page: Number of results per page.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        return self._get_api_list(
            f"/accounts/{account_id}/ai-search/instances/{id}/jobs",
            page=AsyncV4PagePaginationArray[JobListResponse],
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=maybe_transform(
                    {
                        "page": page,
                        "per_page": per_page,
                    },
                    job_list_params.JobListParams,
                ),
            ),
            model=JobListResponse,
        )

    async def get(
        self,
        job_id: str,
        *,
        account_id: str,
        id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> JobGetResponse:
        """
        Get a Job Details

        Args:
          job_id: ID of the job to fetch.

          account_id: Account identifier used in the request path.

          id: Use your AI Search ID.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        if not job_id:
            raise ValueError(f"Expected a non-empty value for `job_id` but received {job_id!r}")
        return await self._get(
            f"/accounts/{account_id}/ai-search/instances/{id}/jobs/{job_id}",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                post_parser=ResultWrapper[JobGetResponse]._unwrapper,
            ),
            cast_to=cast(Type[JobGetResponse], ResultWrapper[JobGetResponse]),
        )

    async def logs(
        self,
        job_id: str,
        *,
        account_id: str,
        id: str,
        page: int | Omit = omit,
        per_page: int | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> JobLogsResponse:
        """
        List Job Logs

        Args:
          job_id: ID of the job whose logs are fetched.

          account_id: Account identifier used in the request path.

          id: Use your AI Search ID.

          page: Page number of results to fetch.

          per_page: Number of results per page.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        if not job_id:
            raise ValueError(f"Expected a non-empty value for `job_id` but received {job_id!r}")
        return await self._get(
            f"/accounts/{account_id}/ai-search/instances/{id}/jobs/{job_id}/logs",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=await async_maybe_transform(
                    {
                        "page": page,
                        "per_page": per_page,
                    },
                    job_logs_params.JobLogsParams,
                ),
                post_parser=ResultWrapper[JobLogsResponse]._unwrapper,
            ),
            cast_to=cast(Type[JobLogsResponse], ResultWrapper[JobLogsResponse]),
        )
class JobsResourceWithRawResponse:
    """Raw-response view over :class:`JobsResource`: each method returns the
    unparsed HTTP response instead of the parsed model."""

    def __init__(self, jobs: JobsResource) -> None:
        self._jobs = jobs
        self.create = to_raw_response_wrapper(jobs.create)
        self.list = to_raw_response_wrapper(jobs.list)
        self.get = to_raw_response_wrapper(jobs.get)
        self.logs = to_raw_response_wrapper(jobs.logs)
class AsyncJobsResourceWithRawResponse:
    """Raw-response view over :class:`AsyncJobsResource`: each method returns
    the unparsed HTTP response instead of the parsed model."""

    def __init__(self, jobs: AsyncJobsResource) -> None:
        self._jobs = jobs
        self.create = async_to_raw_response_wrapper(jobs.create)
        self.list = async_to_raw_response_wrapper(jobs.list)
        self.get = async_to_raw_response_wrapper(jobs.get)
        self.logs = async_to_raw_response_wrapper(jobs.logs)
class JobsResourceWithStreamingResponse:
    """Streaming-response view over :class:`JobsResource`: the response body is
    not read eagerly."""

    def __init__(self, jobs: JobsResource) -> None:
        self._jobs = jobs
        self.create = to_streamed_response_wrapper(jobs.create)
        self.list = to_streamed_response_wrapper(jobs.list)
        self.get = to_streamed_response_wrapper(jobs.get)
        self.logs = to_streamed_response_wrapper(jobs.logs)
class AsyncJobsResourceWithStreamingResponse:
    """Streaming-response view over :class:`AsyncJobsResource`: the response
    body is not read eagerly."""

    def __init__(self, jobs: AsyncJobsResource) -> None:
        self._jobs = jobs
        self.create = async_to_streamed_response_wrapper(jobs.create)
        self.list = async_to_streamed_response_wrapper(jobs.list)
        self.get = async_to_streamed_response_wrapper(jobs.get)
        self.logs = async_to_streamed_response_wrapper(jobs.logs)

View file

@ -0,0 +1,587 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Type, cast
import httpx
from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
from ..._wrappers import ResultWrapper
from ...pagination import SyncV4PagePaginationArray, AsyncV4PagePaginationArray
from ..._base_client import AsyncPaginator, make_request_options
from ...types.aisearch import token_list_params, token_create_params
from ...types.aisearch.token_list_response import TokenListResponse
from ...types.aisearch.token_read_response import TokenReadResponse
from ...types.aisearch.token_create_response import TokenCreateResponse
from ...types.aisearch.token_delete_response import TokenDeleteResponse
from ...types.aisearch.token_update_response import TokenUpdateResponse
__all__ = ["TokensResource", "AsyncTokensResource"]
class TokensResource(SyncAPIResource):
    """Synchronous client for AI Search token endpoints under
    ``/accounts/{account_id}/ai-search/tokens``.
    """

    @cached_property
    def with_raw_response(self) -> TokensResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#accessing-raw-response-data-eg-headers
        """
        return TokensResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> TokensResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#with_streaming_response
        """
        return TokensResourceWithStreamingResponse(self)

    def create(
        self,
        *,
        account_id: str,
        cf_api_id: str,
        cf_api_key: str,
        name: str,
        legacy: bool | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> TokenCreateResponse:
        """
        Create new tokens.

        Args:
          account_id: Account identifier used in the request path.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        return self._post(
            f"/accounts/{account_id}/ai-search/tokens",
            body=maybe_transform(
                {
                    "cf_api_id": cf_api_id,
                    "cf_api_key": cf_api_key,
                    "name": name,
                    "legacy": legacy,
                },
                token_create_params.TokenCreateParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                # The API envelopes the payload; unwrap to the bare result object.
                post_parser=ResultWrapper[TokenCreateResponse]._unwrapper,
            ),
            cast_to=cast(Type[TokenCreateResponse], ResultWrapper[TokenCreateResponse]),
        )

    def update(
        self,
        id: str,
        *,
        account_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> TokenUpdateResponse:
        """
        Delete tokens.

        NOTE(review): despite its name, this generated method issues a DELETE to
        the same endpoint as `delete()` and takes no body parameters — confirm
        against the OpenAPI spec whether `update` should be a PUT/PATCH instead.
        Behavior is preserved as generated.

        Args:
          id: Token identifier used in the request path.

          account_id: Account identifier used in the request path.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        return self._delete(
            f"/accounts/{account_id}/ai-search/tokens/{id}",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                post_parser=ResultWrapper[TokenUpdateResponse]._unwrapper,
            ),
            cast_to=cast(Type[TokenUpdateResponse], ResultWrapper[TokenUpdateResponse]),
        )

    def list(
        self,
        *,
        account_id: str,
        page: int | Omit = omit,
        per_page: int | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> SyncV4PagePaginationArray[TokenListResponse]:
        """
        List tokens.

        Args:
          account_id: Account identifier used in the request path.

          page: Page number of results to fetch.

          per_page: Number of results per page.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        return self._get_api_list(
            f"/accounts/{account_id}/ai-search/tokens",
            page=SyncV4PagePaginationArray[TokenListResponse],
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=maybe_transform(
                    {
                        "page": page,
                        "per_page": per_page,
                    },
                    token_list_params.TokenListParams,
                ),
            ),
            model=TokenListResponse,
        )

    def delete(
        self,
        id: str,
        *,
        account_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> TokenDeleteResponse:
        """
        Delete tokens.

        Args:
          id: Token identifier used in the request path.

          account_id: Account identifier used in the request path.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        return self._delete(
            f"/accounts/{account_id}/ai-search/tokens/{id}",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                post_parser=ResultWrapper[TokenDeleteResponse]._unwrapper,
            ),
            cast_to=cast(Type[TokenDeleteResponse], ResultWrapper[TokenDeleteResponse]),
        )

    def read(
        self,
        id: str,
        *,
        account_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> TokenReadResponse:
        """
        Read token details.

        Args:
          id: Token identifier used in the request path.

          account_id: Account identifier used in the request path.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        # BUGFIX: this read accessor previously issued `self._delete`, which
        # would destroy the token it was asked to fetch. A read must be a GET.
        return self._get(
            f"/accounts/{account_id}/ai-search/tokens/{id}",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                post_parser=ResultWrapper[TokenReadResponse]._unwrapper,
            ),
            cast_to=cast(Type[TokenReadResponse], ResultWrapper[TokenReadResponse]),
        )
class AsyncTokensResource(AsyncAPIResource):
    """Asynchronous client for AI Search token endpoints under
    ``/accounts/{account_id}/ai-search/tokens``.
    """

    @cached_property
    def with_raw_response(self) -> AsyncTokensResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#accessing-raw-response-data-eg-headers
        """
        return AsyncTokensResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncTokensResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/cloudflare/cloudflare-python#with_streaming_response
        """
        return AsyncTokensResourceWithStreamingResponse(self)

    async def create(
        self,
        *,
        account_id: str,
        cf_api_id: str,
        cf_api_key: str,
        name: str,
        legacy: bool | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> TokenCreateResponse:
        """
        Create new tokens.

        Args:
          account_id: Account identifier used in the request path.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        return await self._post(
            f"/accounts/{account_id}/ai-search/tokens",
            body=await async_maybe_transform(
                {
                    "cf_api_id": cf_api_id,
                    "cf_api_key": cf_api_key,
                    "name": name,
                    "legacy": legacy,
                },
                token_create_params.TokenCreateParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                # The API envelopes the payload; unwrap to the bare result object.
                post_parser=ResultWrapper[TokenCreateResponse]._unwrapper,
            ),
            cast_to=cast(Type[TokenCreateResponse], ResultWrapper[TokenCreateResponse]),
        )

    async def update(
        self,
        id: str,
        *,
        account_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> TokenUpdateResponse:
        """
        Delete tokens.

        NOTE(review): despite its name, this generated method issues a DELETE to
        the same endpoint as `delete()` and takes no body parameters — confirm
        against the OpenAPI spec whether `update` should be a PUT/PATCH instead.
        Behavior is preserved as generated.

        Args:
          id: Token identifier used in the request path.

          account_id: Account identifier used in the request path.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        return await self._delete(
            f"/accounts/{account_id}/ai-search/tokens/{id}",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                post_parser=ResultWrapper[TokenUpdateResponse]._unwrapper,
            ),
            cast_to=cast(Type[TokenUpdateResponse], ResultWrapper[TokenUpdateResponse]),
        )

    def list(
        self,
        *,
        account_id: str,
        page: int | Omit = omit,
        per_page: int | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> AsyncPaginator[TokenListResponse, AsyncV4PagePaginationArray[TokenListResponse]]:
        """
        List tokens.

        Args:
          account_id: Account identifier used in the request path.

          page: Page number of results to fetch.

          per_page: Number of results per page.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        return self._get_api_list(
            f"/accounts/{account_id}/ai-search/tokens",
            page=AsyncV4PagePaginationArray[TokenListResponse],
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=maybe_transform(
                    {
                        "page": page,
                        "per_page": per_page,
                    },
                    token_list_params.TokenListParams,
                ),
            ),
            model=TokenListResponse,
        )

    async def delete(
        self,
        id: str,
        *,
        account_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> TokenDeleteResponse:
        """
        Delete tokens.

        Args:
          id: Token identifier used in the request path.

          account_id: Account identifier used in the request path.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        return await self._delete(
            f"/accounts/{account_id}/ai-search/tokens/{id}",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                post_parser=ResultWrapper[TokenDeleteResponse]._unwrapper,
            ),
            cast_to=cast(Type[TokenDeleteResponse], ResultWrapper[TokenDeleteResponse]),
        )

    async def read(
        self,
        id: str,
        *,
        account_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> TokenReadResponse:
        """
        Read token details.

        Args:
          id: Token identifier used in the request path.

          account_id: Account identifier used in the request path.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not account_id:
            raise ValueError(f"Expected a non-empty value for `account_id` but received {account_id!r}")
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        # BUGFIX: this read accessor previously issued `self._delete`, which
        # would destroy the token it was asked to fetch. A read must be a GET.
        return await self._get(
            f"/accounts/{account_id}/ai-search/tokens/{id}",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                post_parser=ResultWrapper[TokenReadResponse]._unwrapper,
            ),
            cast_to=cast(Type[TokenReadResponse], ResultWrapper[TokenReadResponse]),
        )
class TokensResourceWithRawResponse:
    """Raw-response view over :class:`TokensResource`: each method returns the
    unparsed HTTP response instead of the parsed model."""

    def __init__(self, tokens: TokensResource) -> None:
        self._tokens = tokens
        self.create = to_raw_response_wrapper(tokens.create)
        self.update = to_raw_response_wrapper(tokens.update)
        self.list = to_raw_response_wrapper(tokens.list)
        self.delete = to_raw_response_wrapper(tokens.delete)
        self.read = to_raw_response_wrapper(tokens.read)
class AsyncTokensResourceWithRawResponse:
    """Raw-response view over :class:`AsyncTokensResource`: each method returns
    the unparsed HTTP response instead of the parsed model."""

    def __init__(self, tokens: AsyncTokensResource) -> None:
        self._tokens = tokens
        self.create = async_to_raw_response_wrapper(tokens.create)
        self.update = async_to_raw_response_wrapper(tokens.update)
        self.list = async_to_raw_response_wrapper(tokens.list)
        self.delete = async_to_raw_response_wrapper(tokens.delete)
        self.read = async_to_raw_response_wrapper(tokens.read)
class TokensResourceWithStreamingResponse:
    """Streaming-response view over :class:`TokensResource`: the response body
    is not read eagerly."""

    def __init__(self, tokens: TokensResource) -> None:
        self._tokens = tokens
        self.create = to_streamed_response_wrapper(tokens.create)
        self.update = to_streamed_response_wrapper(tokens.update)
        self.list = to_streamed_response_wrapper(tokens.list)
        self.delete = to_streamed_response_wrapper(tokens.delete)
        self.read = to_streamed_response_wrapper(tokens.read)
class AsyncTokensResourceWithStreamingResponse:
    """Streaming-response view over :class:`AsyncTokensResource`: the response
    body is not read eagerly."""

    def __init__(self, tokens: AsyncTokensResource) -> None:
        self._tokens = tokens
        self.create = async_to_streamed_response_wrapper(tokens.create)
        self.update = async_to_streamed_response_wrapper(tokens.update)
        self.list = async_to_streamed_response_wrapper(tokens.list)
        self.delete = async_to_streamed_response_wrapper(tokens.delete)
        self.read = async_to_streamed_response_wrapper(tokens.read)

View file

@ -0,0 +1,20 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from .token_list_params import TokenListParams as TokenListParams
from .token_create_params import TokenCreateParams as TokenCreateParams
from .token_list_response import TokenListResponse as TokenListResponse
from .token_read_response import TokenReadResponse as TokenReadResponse
from .instance_list_params import InstanceListParams as InstanceListParams
from .token_create_response import TokenCreateResponse as TokenCreateResponse
from .token_delete_response import TokenDeleteResponse as TokenDeleteResponse
from .token_update_response import TokenUpdateResponse as TokenUpdateResponse
from .instance_create_params import InstanceCreateParams as InstanceCreateParams
from .instance_list_response import InstanceListResponse as InstanceListResponse
from .instance_read_response import InstanceReadResponse as InstanceReadResponse
from .instance_update_params import InstanceUpdateParams as InstanceUpdateParams
from .instance_stats_response import InstanceStatsResponse as InstanceStatsResponse
from .instance_create_response import InstanceCreateResponse as InstanceCreateResponse
from .instance_delete_response import InstanceDeleteResponse as InstanceDeleteResponse
from .instance_update_response import InstanceUpdateResponse as InstanceUpdateResponse

View file

@ -0,0 +1,176 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Dict
from typing_extensions import Literal, Required, Annotated, TypedDict
from ..._types import SequenceNotStr
from ..._utils import PropertyInfo
from ..r2.buckets.provider import Provider
__all__ = [
"InstanceCreateParams",
"Metadata",
"SourceParams",
"SourceParamsWebCrawler",
"SourceParamsWebCrawlerParseOptions",
"SourceParamsWebCrawlerStoreOptions",
]
class InstanceCreateParams(TypedDict, total=False):
    """Request body for creating an AI Search instance."""
    account_id: Required[str]
    id: Required[str]
    """Use your AI Search ID."""
    source: Required[str]
    token_id: Required[str]
    type: Required[Literal["r2", "web-crawler"]]
    ai_gateway_id: str
    # Serialized on the wire as "ai_search_model"; the SDK attribute drops the underscore.
    aisearch_model: Annotated[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ],
        PropertyInfo(alias="ai_search_model"),
    ]
    chunk: bool
    chunk_overlap: int
    chunk_size: int
    embedding_model: Literal[
        "@cf/baai/bge-m3",
        "@cf/baai/bge-large-en-v1.5",
        "@cf/google/embeddinggemma-300m",
        "@cf/qwen/qwen3-embedding-0.6b",
        "google-ai-studio/gemini-embedding-001",
        "openai/text-embedding-3-small",
        "openai/text-embedding-3-large",
        "",
    ]
    max_num_results: int
    metadata: Metadata
    reranking: bool
    reranking_model: Literal["@cf/baai/bge-reranker-base", ""]
    rewrite_model: Literal[
        "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
        "@cf/meta/llama-3.1-8b-instruct-fast",
        "@cf/meta/llama-3.1-8b-instruct-fp8",
        "@cf/meta/llama-4-scout-17b-16e-instruct",
        "@cf/qwen/qwen3-30b-a3b-fp8",
        "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
        "@cf/moonshotai/kimi-k2-instruct",
        "anthropic/claude-3-7-sonnet",
        "anthropic/claude-sonnet-4",
        "anthropic/claude-opus-4",
        "anthropic/claude-3-5-haiku",
        "cerebras/qwen-3-235b-a22b-instruct",
        "cerebras/qwen-3-235b-a22b-thinking",
        "cerebras/llama-3.3-70b",
        "cerebras/llama-4-maverick-17b-128e-instruct",
        "cerebras/llama-4-scout-17b-16e-instruct",
        "cerebras/gpt-oss-120b",
        "google-ai-studio/gemini-2.5-flash",
        "google-ai-studio/gemini-2.5-pro",
        "grok/grok-4",
        "groq/llama-3.3-70b-versatile",
        "groq/llama-3.1-8b-instant",
        "openai/gpt-5",
        "openai/gpt-5-mini",
        "openai/gpt-5-nano",
        "",
    ]
    rewrite_query: bool
    score_threshold: float
    source_params: SourceParams
class Metadata(TypedDict, total=False):
    """Free-form instance metadata recorded at creation time."""
    created_from_aisearch_wizard: bool
    worker_domain: str
class SourceParamsWebCrawlerParseOptions(TypedDict, total=False):
    """Options controlling how the web crawler parses fetched pages."""
    include_headers: Dict[str, str]
    include_images: bool
    use_browser_rendering: bool
class SourceParamsWebCrawlerStoreOptions(TypedDict, total=False):
    """Where the web crawler stores fetched content."""
    storage_id: Required[str]
    r2_jurisdiction: str
    storage_type: Provider
class SourceParamsWebCrawler(TypedDict, total=False):
    """Web-crawler-specific source configuration."""
    parse_options: SourceParamsWebCrawlerParseOptions
    parse_type: Literal["sitemap", "feed-rss"]
    store_options: SourceParamsWebCrawlerStoreOptions
class SourceParams(TypedDict, total=False):
    """Filters and options applied to the configured data source."""
    exclude_items: SequenceNotStr[str]
    """List of path patterns to exclude.
    Supports wildcards (e.g., `*/admin/*`, `/private/**`).
    """
    include_items: SequenceNotStr[str]
    """List of path patterns to include.
    Supports wildcards (e.g., `*/blog/*.html`, `/docs/**`).
    """
    prefix: str
    r2_jurisdiction: str
    web_crawler: SourceParamsWebCrawler

View file

@ -0,0 +1,248 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Optional
from datetime import datetime
from typing_extensions import Literal
from pydantic import Field as FieldInfo
from ..._models import BaseModel
from ..r2.buckets.provider import Provider
__all__ = [
"InstanceCreateResponse",
"Metadata",
"SourceParams",
"SourceParamsWebCrawler",
"SourceParamsWebCrawlerParseOptions",
"SourceParamsWebCrawlerStoreOptions",
]
class Metadata(BaseModel):
    """Free-form instance metadata recorded at creation time."""
    created_from_aisearch_wizard: Optional[bool] = None
    worker_domain: Optional[str] = None
class SourceParamsWebCrawlerParseOptions(BaseModel):
    """Options controlling how the web crawler parses fetched pages."""
    include_headers: Optional[Dict[str, str]] = None
    include_images: Optional[bool] = None
    use_browser_rendering: Optional[bool] = None
class SourceParamsWebCrawlerStoreOptions(BaseModel):
    """Where the web crawler stores fetched content."""
    storage_id: str
    r2_jurisdiction: Optional[str] = None
    storage_type: Optional[Provider] = None
class SourceParamsWebCrawler(BaseModel):
    """Web-crawler-specific source configuration."""
    parse_options: Optional[SourceParamsWebCrawlerParseOptions] = None
    parse_type: Optional[Literal["sitemap", "feed-rss"]] = None
    store_options: Optional[SourceParamsWebCrawlerStoreOptions] = None
class SourceParams(BaseModel):
    """Filters and options applied to the configured data source."""
    exclude_items: Optional[List[str]] = None
    """List of path patterns to exclude.
    Supports wildcards (e.g., `*/admin/*`, `/private/**`).
    """
    include_items: Optional[List[str]] = None
    """List of path patterns to include.
    Supports wildcards (e.g., `*/blog/*.html`, `/docs/**`).
    """
    prefix: Optional[str] = None
    r2_jurisdiction: Optional[str] = None
    web_crawler: Optional[SourceParamsWebCrawler] = None
class InstanceCreateResponse(BaseModel):
    """Full AI Search instance configuration as returned after creation."""
    id: str
    """Use your AI Search ID."""
    account_id: str
    account_tag: str
    created_at: datetime
    internal_id: str
    modified_at: datetime
    source: str
    token_id: str
    type: Literal["r2", "web-crawler"]
    vectorize_name: str
    ai_gateway_id: Optional[str] = None
    # Serialized on the wire as "ai_search_model"; the SDK attribute drops the underscore.
    aisearch_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = FieldInfo(alias="ai_search_model", default=None)
    cache: Optional[bool] = None
    cache_threshold: Optional[Literal["super_strict_match", "close_enough", "flexible_friend", "anything_goes"]] = None
    chunk: Optional[bool] = None
    chunk_overlap: Optional[int] = None
    chunk_size: Optional[int] = None
    created_by: Optional[str] = None
    embedding_model: Optional[
        Literal[
            "@cf/baai/bge-m3",
            "@cf/baai/bge-large-en-v1.5",
            "@cf/google/embeddinggemma-300m",
            "@cf/qwen/qwen3-embedding-0.6b",
            "google-ai-studio/gemini-embedding-001",
            "openai/text-embedding-3-small",
            "openai/text-embedding-3-large",
            "",
        ]
    ] = None
    enable: Optional[bool] = None
    engine_version: Optional[float] = None
    last_activity: Optional[datetime] = None
    max_num_results: Optional[int] = None
    metadata: Optional[Metadata] = None
    modified_by: Optional[str] = None
    paused: Optional[bool] = None
    reranking: Optional[bool] = None
    reranking_model: Optional[Literal["@cf/baai/bge-reranker-base", ""]] = None
    rewrite_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = None
    rewrite_query: Optional[bool] = None
    score_threshold: Optional[float] = None
    source_params: Optional[SourceParams] = None
    status: Optional[str] = None
    summarization: Optional[bool] = None
    summarization_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = None
    # Serialized on the wire as "system_prompt_ai_search".
    system_prompt_aisearch: Optional[str] = FieldInfo(alias="system_prompt_ai_search", default=None)
    system_prompt_index_summarization: Optional[str] = None
    system_prompt_rewrite_query: Optional[str] = None
    vectorize_active_namespace: Optional[str] = None

View file

@ -0,0 +1,248 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Optional
from datetime import datetime
from typing_extensions import Literal
from pydantic import Field as FieldInfo
from ..._models import BaseModel
from ..r2.buckets.provider import Provider
__all__ = [
"InstanceDeleteResponse",
"Metadata",
"SourceParams",
"SourceParamsWebCrawler",
"SourceParamsWebCrawlerParseOptions",
"SourceParamsWebCrawlerStoreOptions",
]
class Metadata(BaseModel):
    """Free-form instance metadata recorded at creation time."""
    created_from_aisearch_wizard: Optional[bool] = None
    worker_domain: Optional[str] = None
class SourceParamsWebCrawlerParseOptions(BaseModel):
    """Options controlling how the web crawler parses fetched pages."""
    include_headers: Optional[Dict[str, str]] = None
    include_images: Optional[bool] = None
    use_browser_rendering: Optional[bool] = None
class SourceParamsWebCrawlerStoreOptions(BaseModel):
    """Where the web crawler stores fetched content."""
    storage_id: str
    r2_jurisdiction: Optional[str] = None
    storage_type: Optional[Provider] = None
class SourceParamsWebCrawler(BaseModel):
    """Web-crawler-specific source configuration."""
    parse_options: Optional[SourceParamsWebCrawlerParseOptions] = None
    parse_type: Optional[Literal["sitemap", "feed-rss"]] = None
    store_options: Optional[SourceParamsWebCrawlerStoreOptions] = None
class SourceParams(BaseModel):
    """Filters and options applied to the configured data source."""
    exclude_items: Optional[List[str]] = None
    """List of path patterns to exclude.
    Supports wildcards (e.g., `*/admin/*`, `/private/**`).
    """
    include_items: Optional[List[str]] = None
    """List of path patterns to include.
    Supports wildcards (e.g., `*/blog/*.html`, `/docs/**`).
    """
    prefix: Optional[str] = None
    r2_jurisdiction: Optional[str] = None
    web_crawler: Optional[SourceParamsWebCrawler] = None
class InstanceDeleteResponse(BaseModel):
    """Full configuration of the AI Search instance that was deleted."""
    id: str
    """Use your AI Search ID."""
    account_id: str
    account_tag: str
    created_at: datetime
    internal_id: str
    modified_at: datetime
    source: str
    token_id: str
    type: Literal["r2", "web-crawler"]
    vectorize_name: str
    ai_gateway_id: Optional[str] = None
    # Serialized on the wire as "ai_search_model"; the SDK attribute drops the underscore.
    aisearch_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = FieldInfo(alias="ai_search_model", default=None)
    cache: Optional[bool] = None
    cache_threshold: Optional[Literal["super_strict_match", "close_enough", "flexible_friend", "anything_goes"]] = None
    chunk: Optional[bool] = None
    chunk_overlap: Optional[int] = None
    chunk_size: Optional[int] = None
    created_by: Optional[str] = None
    embedding_model: Optional[
        Literal[
            "@cf/baai/bge-m3",
            "@cf/baai/bge-large-en-v1.5",
            "@cf/google/embeddinggemma-300m",
            "@cf/qwen/qwen3-embedding-0.6b",
            "google-ai-studio/gemini-embedding-001",
            "openai/text-embedding-3-small",
            "openai/text-embedding-3-large",
            "",
        ]
    ] = None
    enable: Optional[bool] = None
    engine_version: Optional[float] = None
    last_activity: Optional[datetime] = None
    max_num_results: Optional[int] = None
    metadata: Optional[Metadata] = None
    modified_by: Optional[str] = None
    paused: Optional[bool] = None
    reranking: Optional[bool] = None
    reranking_model: Optional[Literal["@cf/baai/bge-reranker-base", ""]] = None
    rewrite_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = None
    rewrite_query: Optional[bool] = None
    score_threshold: Optional[float] = None
    source_params: Optional[SourceParams] = None
    status: Optional[str] = None
    summarization: Optional[bool] = None
    summarization_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = None
    # Serialized on the wire as "system_prompt_ai_search".
    system_prompt_aisearch: Optional[str] = FieldInfo(alias="system_prompt_ai_search", default=None)
    system_prompt_index_summarization: Optional[str] = None
    system_prompt_rewrite_query: Optional[str] = None
    vectorize_active_namespace: Optional[str] = None

View file

@ -0,0 +1,18 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Required, TypedDict
__all__ = ["InstanceListParams"]
class InstanceListParams(TypedDict, total=False):
    """Query parameters for listing AI Search instances (paginated)."""
    account_id: Required[str]
    page: int
    per_page: int
    search: str
    """Search by id"""

View file

@ -0,0 +1,248 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Optional
from datetime import datetime
from typing_extensions import Literal
from pydantic import Field as FieldInfo
from ..._models import BaseModel
from ..r2.buckets.provider import Provider
__all__ = [
"InstanceListResponse",
"Metadata",
"SourceParams",
"SourceParamsWebCrawler",
"SourceParamsWebCrawlerParseOptions",
"SourceParamsWebCrawlerStoreOptions",
]
class Metadata(BaseModel):
    """Free-form instance metadata recorded at creation time."""
    created_from_aisearch_wizard: Optional[bool] = None
    worker_domain: Optional[str] = None
class SourceParamsWebCrawlerParseOptions(BaseModel):
    """Options controlling how the web crawler parses fetched pages."""
    include_headers: Optional[Dict[str, str]] = None
    include_images: Optional[bool] = None
    use_browser_rendering: Optional[bool] = None
class SourceParamsWebCrawlerStoreOptions(BaseModel):
    """Where the web crawler stores fetched content."""
    storage_id: str
    r2_jurisdiction: Optional[str] = None
    storage_type: Optional[Provider] = None
class SourceParamsWebCrawler(BaseModel):
    """Web-crawler-specific source configuration."""
    parse_options: Optional[SourceParamsWebCrawlerParseOptions] = None
    parse_type: Optional[Literal["sitemap", "feed-rss"]] = None
    store_options: Optional[SourceParamsWebCrawlerStoreOptions] = None
class SourceParams(BaseModel):
    """Filters and options applied to the configured data source."""
    exclude_items: Optional[List[str]] = None
    """List of path patterns to exclude.
    Supports wildcards (e.g., `*/admin/*`, `/private/**`).
    """
    include_items: Optional[List[str]] = None
    """List of path patterns to include.
    Supports wildcards (e.g., `*/blog/*.html`, `/docs/**`).
    """
    prefix: Optional[str] = None
    r2_jurisdiction: Optional[str] = None
    web_crawler: Optional[SourceParamsWebCrawler] = None
class InstanceListResponse(BaseModel):
    """One AI Search instance entry in a paginated list response."""
    id: str
    """Use your AI Search ID."""
    account_id: str
    account_tag: str
    created_at: datetime
    internal_id: str
    modified_at: datetime
    source: str
    token_id: str
    type: Literal["r2", "web-crawler"]
    vectorize_name: str
    ai_gateway_id: Optional[str] = None
    # Serialized on the wire as "ai_search_model"; the SDK attribute drops the underscore.
    aisearch_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = FieldInfo(alias="ai_search_model", default=None)
    cache: Optional[bool] = None
    cache_threshold: Optional[Literal["super_strict_match", "close_enough", "flexible_friend", "anything_goes"]] = None
    chunk: Optional[bool] = None
    chunk_overlap: Optional[int] = None
    chunk_size: Optional[int] = None
    created_by: Optional[str] = None
    embedding_model: Optional[
        Literal[
            "@cf/baai/bge-m3",
            "@cf/baai/bge-large-en-v1.5",
            "@cf/google/embeddinggemma-300m",
            "@cf/qwen/qwen3-embedding-0.6b",
            "google-ai-studio/gemini-embedding-001",
            "openai/text-embedding-3-small",
            "openai/text-embedding-3-large",
            "",
        ]
    ] = None
    enable: Optional[bool] = None
    engine_version: Optional[float] = None
    last_activity: Optional[datetime] = None
    max_num_results: Optional[int] = None
    metadata: Optional[Metadata] = None
    modified_by: Optional[str] = None
    paused: Optional[bool] = None
    reranking: Optional[bool] = None
    reranking_model: Optional[Literal["@cf/baai/bge-reranker-base", ""]] = None
    rewrite_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = None
    rewrite_query: Optional[bool] = None
    score_threshold: Optional[float] = None
    source_params: Optional[SourceParams] = None
    status: Optional[str] = None
    summarization: Optional[bool] = None
    summarization_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = None
    # Serialized on the wire as "system_prompt_ai_search".
    system_prompt_aisearch: Optional[str] = FieldInfo(alias="system_prompt_ai_search", default=None)
    system_prompt_index_summarization: Optional[str] = None
    system_prompt_rewrite_query: Optional[str] = None
    vectorize_active_namespace: Optional[str] = None

View file

@ -0,0 +1,248 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Optional
from datetime import datetime
from typing_extensions import Literal
from pydantic import Field as FieldInfo
from ..._models import BaseModel
from ..r2.buckets.provider import Provider
__all__ = [
"InstanceReadResponse",
"Metadata",
"SourceParams",
"SourceParamsWebCrawler",
"SourceParamsWebCrawlerParseOptions",
"SourceParamsWebCrawlerStoreOptions",
]
class Metadata(BaseModel):
    """Free-form instance metadata recorded at creation time."""
    created_from_aisearch_wizard: Optional[bool] = None
    worker_domain: Optional[str] = None
class SourceParamsWebCrawlerParseOptions(BaseModel):
    """Options controlling how the web crawler parses fetched pages."""
    include_headers: Optional[Dict[str, str]] = None
    include_images: Optional[bool] = None
    use_browser_rendering: Optional[bool] = None
class SourceParamsWebCrawlerStoreOptions(BaseModel):
    """Where the web crawler stores fetched content."""
    storage_id: str
    r2_jurisdiction: Optional[str] = None
    storage_type: Optional[Provider] = None
class SourceParamsWebCrawler(BaseModel):
    """Web-crawler-specific source configuration."""
    parse_options: Optional[SourceParamsWebCrawlerParseOptions] = None
    parse_type: Optional[Literal["sitemap", "feed-rss"]] = None
    store_options: Optional[SourceParamsWebCrawlerStoreOptions] = None
class SourceParams(BaseModel):
    """Filters and options applied to the configured data source."""
    exclude_items: Optional[List[str]] = None
    """List of path patterns to exclude.
    Supports wildcards (e.g., `*/admin/*`, `/private/**`).
    """
    include_items: Optional[List[str]] = None
    """List of path patterns to include.
    Supports wildcards (e.g., `*/blog/*.html`, `/docs/**`).
    """
    prefix: Optional[str] = None
    r2_jurisdiction: Optional[str] = None
    web_crawler: Optional[SourceParamsWebCrawler] = None
class InstanceReadResponse(BaseModel):
    """Full AI Search instance configuration returned by the read endpoint."""
    id: str
    """Use your AI Search ID."""
    account_id: str
    account_tag: str
    created_at: datetime
    internal_id: str
    modified_at: datetime
    source: str
    token_id: str
    type: Literal["r2", "web-crawler"]
    vectorize_name: str
    ai_gateway_id: Optional[str] = None
    # Serialized on the wire as "ai_search_model"; the SDK attribute drops the underscore.
    aisearch_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = FieldInfo(alias="ai_search_model", default=None)
    cache: Optional[bool] = None
    cache_threshold: Optional[Literal["super_strict_match", "close_enough", "flexible_friend", "anything_goes"]] = None
    chunk: Optional[bool] = None
    chunk_overlap: Optional[int] = None
    chunk_size: Optional[int] = None
    created_by: Optional[str] = None
    embedding_model: Optional[
        Literal[
            "@cf/baai/bge-m3",
            "@cf/baai/bge-large-en-v1.5",
            "@cf/google/embeddinggemma-300m",
            "@cf/qwen/qwen3-embedding-0.6b",
            "google-ai-studio/gemini-embedding-001",
            "openai/text-embedding-3-small",
            "openai/text-embedding-3-large",
            "",
        ]
    ] = None
    enable: Optional[bool] = None
    engine_version: Optional[float] = None
    last_activity: Optional[datetime] = None
    max_num_results: Optional[int] = None
    metadata: Optional[Metadata] = None
    modified_by: Optional[str] = None
    paused: Optional[bool] = None
    reranking: Optional[bool] = None
    reranking_model: Optional[Literal["@cf/baai/bge-reranker-base", ""]] = None
    rewrite_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = None
    rewrite_query: Optional[bool] = None
    score_threshold: Optional[float] = None
    source_params: Optional[SourceParams] = None
    status: Optional[str] = None
    summarization: Optional[bool] = None
    summarization_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = None
    # Serialized on the wire as "system_prompt_ai_search".
    system_prompt_aisearch: Optional[str] = FieldInfo(alias="system_prompt_ai_search", default=None)
    system_prompt_index_summarization: Optional[str] = None
    system_prompt_rewrite_query: Optional[str] = None
    vectorize_active_namespace: Optional[str] = None

View file

@ -0,0 +1,26 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, Optional
from datetime import datetime
from ..._models import BaseModel
__all__ = ["InstanceStatsResponse"]
class InstanceStatsResponse(BaseModel):
    """Indexing statistics for an AI Search instance (job counts by state)."""
    completed: Optional[int] = None
    error: Optional[int] = None
    file_embed_errors: Optional[Dict[str, object]] = None
    index_source_errors: Optional[Dict[str, object]] = None
    last_activity: Optional[datetime] = None
    queued: Optional[int] = None
    running: Optional[int] = None
    skipped: Optional[int] = None

View file

@ -0,0 +1,212 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Dict
from typing_extensions import Literal, Required, Annotated, TypedDict
from ..._types import SequenceNotStr
from ..._utils import PropertyInfo
from ..r2.buckets.provider import Provider
__all__ = [
"InstanceUpdateParams",
"Metadata",
"SourceParams",
"SourceParamsWebCrawler",
"SourceParamsWebCrawlerParseOptions",
"SourceParamsWebCrawlerStoreOptions",
]
class InstanceUpdateParams(TypedDict, total=False):
    """Request body for updating an existing AI Search instance."""
    account_id: Required[str]
    ai_gateway_id: str
    # Serialized on the wire as "ai_search_model"; the SDK attribute drops the underscore.
    aisearch_model: Annotated[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ],
        PropertyInfo(alias="ai_search_model"),
    ]
    cache: bool
    cache_threshold: Literal["super_strict_match", "close_enough", "flexible_friend", "anything_goes"]
    chunk: bool
    chunk_overlap: int
    chunk_size: int
    embedding_model: Literal[
        "@cf/baai/bge-m3",
        "@cf/baai/bge-large-en-v1.5",
        "@cf/google/embeddinggemma-300m",
        "@cf/qwen/qwen3-embedding-0.6b",
        "google-ai-studio/gemini-embedding-001",
        "openai/text-embedding-3-small",
        "openai/text-embedding-3-large",
        "",
    ]
    max_num_results: int
    metadata: Metadata
    paused: bool
    reranking: bool
    reranking_model: Literal["@cf/baai/bge-reranker-base", ""]
    rewrite_model: Literal[
        "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
        "@cf/meta/llama-3.1-8b-instruct-fast",
        "@cf/meta/llama-3.1-8b-instruct-fp8",
        "@cf/meta/llama-4-scout-17b-16e-instruct",
        "@cf/qwen/qwen3-30b-a3b-fp8",
        "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
        "@cf/moonshotai/kimi-k2-instruct",
        "anthropic/claude-3-7-sonnet",
        "anthropic/claude-sonnet-4",
        "anthropic/claude-opus-4",
        "anthropic/claude-3-5-haiku",
        "cerebras/qwen-3-235b-a22b-instruct",
        "cerebras/qwen-3-235b-a22b-thinking",
        "cerebras/llama-3.3-70b",
        "cerebras/llama-4-maverick-17b-128e-instruct",
        "cerebras/llama-4-scout-17b-16e-instruct",
        "cerebras/gpt-oss-120b",
        "google-ai-studio/gemini-2.5-flash",
        "google-ai-studio/gemini-2.5-pro",
        "grok/grok-4",
        "groq/llama-3.3-70b-versatile",
        "groq/llama-3.1-8b-instant",
        "openai/gpt-5",
        "openai/gpt-5-mini",
        "openai/gpt-5-nano",
        "",
    ]
    rewrite_query: bool
    score_threshold: float
    source_params: SourceParams
    summarization: bool
    summarization_model: Literal[
        "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
        "@cf/meta/llama-3.1-8b-instruct-fast",
        "@cf/meta/llama-3.1-8b-instruct-fp8",
        "@cf/meta/llama-4-scout-17b-16e-instruct",
        "@cf/qwen/qwen3-30b-a3b-fp8",
        "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
        "@cf/moonshotai/kimi-k2-instruct",
        "anthropic/claude-3-7-sonnet",
        "anthropic/claude-sonnet-4",
        "anthropic/claude-opus-4",
        "anthropic/claude-3-5-haiku",
        "cerebras/qwen-3-235b-a22b-instruct",
        "cerebras/qwen-3-235b-a22b-thinking",
        "cerebras/llama-3.3-70b",
        "cerebras/llama-4-maverick-17b-128e-instruct",
        "cerebras/llama-4-scout-17b-16e-instruct",
        "cerebras/gpt-oss-120b",
        "google-ai-studio/gemini-2.5-flash",
        "google-ai-studio/gemini-2.5-pro",
        "grok/grok-4",
        "groq/llama-3.3-70b-versatile",
        "groq/llama-3.1-8b-instant",
        "openai/gpt-5",
        "openai/gpt-5-mini",
        "openai/gpt-5-nano",
        "",
    ]
    # Serialized on the wire as "system_prompt_ai_search".
    system_prompt_aisearch: Annotated[str, PropertyInfo(alias="system_prompt_ai_search")]
    system_prompt_index_summarization: str
    system_prompt_rewrite_query: str
    token_id: str
class Metadata(TypedDict, total=False):
    """Auxiliary metadata attached to an AI Search instance."""

    # True when the instance was created via the AI Search dashboard wizard.
    created_from_aisearch_wizard: bool

    # Domain of the Worker tied to this instance — presumably the deployed
    # search Worker; confirm against the API docs.
    worker_domain: str
class SourceParamsWebCrawlerParseOptions(TypedDict, total=False):
    """Options controlling how the web crawler parses fetched pages."""

    # Headers to include, keyed by header name — assumed to be request
    # headers sent while crawling; TODO confirm semantics.
    include_headers: Dict[str, str]

    # Whether images are included in the parsed output.
    include_images: bool

    # Whether pages are rendered with browser rendering before parsing.
    use_browser_rendering: bool
class SourceParamsWebCrawlerStoreOptions(TypedDict, total=False):
    """Options describing where crawled content is stored."""

    # Identifier of the storage target; the only required key.
    storage_id: Required[str]

    # R2 jurisdiction for the stored data, when applicable.
    r2_jurisdiction: str

    # Storage backend type; reuses the R2 buckets ``Provider`` enum.
    storage_type: Provider
class SourceParamsWebCrawler(TypedDict, total=False):
    """Web-crawler-specific source configuration."""

    # How fetched documents are parsed.
    parse_options: SourceParamsWebCrawlerParseOptions

    # Treat the crawled source as a sitemap or an RSS feed.
    parse_type: Literal["sitemap", "feed-rss"]

    # Where crawled content is persisted.
    store_options: SourceParamsWebCrawlerStoreOptions
class SourceParams(TypedDict, total=False):
    """Source-selection parameters for an AI Search instance."""

    exclude_items: SequenceNotStr[str]
    """List of path patterns to exclude.

    Supports wildcards (e.g. ``*/admin/*``, ``/private/**``, ``*private*``).
    """

    include_items: SequenceNotStr[str]
    """List of path patterns to include.

    Supports wildcards (e.g. ``*/blog/*.html``, ``/docs/**``, ``*blog*.html``).
    """

    # Key prefix restricting which objects are indexed — presumably for R2
    # sources; confirm against the API docs.
    prefix: str

    # R2 jurisdiction of the source bucket, when applicable.
    r2_jurisdiction: str

    # Web-crawler options; only meaningful for web-crawler sources.
    web_crawler: SourceParamsWebCrawler

View file

@ -0,0 +1,248 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Optional
from datetime import datetime
from typing_extensions import Literal
from pydantic import Field as FieldInfo
from ..._models import BaseModel
from ..r2.buckets.provider import Provider
__all__ = [
"InstanceUpdateResponse",
"Metadata",
"SourceParams",
"SourceParamsWebCrawler",
"SourceParamsWebCrawlerParseOptions",
"SourceParamsWebCrawlerStoreOptions",
]
class Metadata(BaseModel):
    """Auxiliary metadata attached to an AI Search instance."""

    # True when the instance was created via the AI Search dashboard wizard.
    created_from_aisearch_wizard: Optional[bool] = None

    # Domain of the Worker tied to this instance — presumably the deployed
    # search Worker; confirm against the API docs.
    worker_domain: Optional[str] = None
class SourceParamsWebCrawlerParseOptions(BaseModel):
    """Options controlling how the web crawler parses fetched pages."""

    # Headers to include, keyed by header name — assumed to be request
    # headers sent while crawling; TODO confirm semantics.
    include_headers: Optional[Dict[str, str]] = None

    # Whether images are included in the parsed output.
    include_images: Optional[bool] = None

    # Whether pages are rendered with browser rendering before parsing.
    use_browser_rendering: Optional[bool] = None
class SourceParamsWebCrawlerStoreOptions(BaseModel):
    """Options describing where crawled content is stored."""

    # Identifier of the storage target; the only required field.
    storage_id: str

    # R2 jurisdiction for the stored data, when applicable.
    r2_jurisdiction: Optional[str] = None

    # Storage backend type; reuses the R2 buckets ``Provider`` enum.
    storage_type: Optional[Provider] = None
class SourceParamsWebCrawler(BaseModel):
    """Web-crawler-specific source configuration."""

    # How fetched documents are parsed.
    parse_options: Optional[SourceParamsWebCrawlerParseOptions] = None

    # Treat the crawled source as a sitemap or an RSS feed.
    parse_type: Optional[Literal["sitemap", "feed-rss"]] = None

    # Where crawled content is persisted.
    store_options: Optional[SourceParamsWebCrawlerStoreOptions] = None
class SourceParams(BaseModel):
    """Source-selection parameters of an AI Search instance."""

    exclude_items: Optional[List[str]] = None
    """List of path patterns to exclude.

    Supports wildcards (e.g. ``*/admin/*``, ``/private/**``, ``*private*``).
    """

    include_items: Optional[List[str]] = None
    """List of path patterns to include.

    Supports wildcards (e.g. ``*/blog/*.html``, ``/docs/**``, ``*blog*.html``).
    """

    # Key prefix restricting which objects are indexed — presumably for R2
    # sources; confirm against the API docs.
    prefix: Optional[str] = None

    # R2 jurisdiction of the source bucket, when applicable.
    r2_jurisdiction: Optional[str] = None

    # Web-crawler options; only meaningful for web-crawler sources.
    web_crawler: Optional[SourceParamsWebCrawler] = None
class InstanceUpdateResponse(BaseModel):
    """An AI Search instance, as returned by the instance update endpoint.

    Model identifiers accepting ``""`` use the empty string to mean
    "no model configured" (presumably server default; confirm in API docs).
    """

    id: str
    """Use your AI Search ID."""

    account_id: str

    account_tag: str

    created_at: datetime

    # Server-side identifier, distinct from the user-facing ``id``.
    internal_id: str

    modified_at: datetime

    # Data source reference of the instance (exact format not evident here).
    source: str

    # Service token associated with the instance.
    token_id: str

    # Kind of data source backing the instance.
    type: Literal["r2", "web-crawler"]

    # Name of the Vectorize index backing the instance.
    vectorize_name: str

    ai_gateway_id: Optional[str] = None

    # Generation model; serialized on the wire as ``ai_search_model``.
    aisearch_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = FieldInfo(alias="ai_search_model", default=None)

    # Caching configuration.
    cache: Optional[bool] = None

    cache_threshold: Optional[Literal["super_strict_match", "close_enough", "flexible_friend", "anything_goes"]] = None

    # Document chunking configuration.
    chunk: Optional[bool] = None

    chunk_overlap: Optional[int] = None

    chunk_size: Optional[int] = None

    created_by: Optional[str] = None

    # Embedding model used for indexing.
    embedding_model: Optional[
        Literal[
            "@cf/baai/bge-m3",
            "@cf/baai/bge-large-en-v1.5",
            "@cf/google/embeddinggemma-300m",
            "@cf/qwen/qwen3-embedding-0.6b",
            "google-ai-studio/gemini-embedding-001",
            "openai/text-embedding-3-small",
            "openai/text-embedding-3-large",
            "",
        ]
    ] = None

    enable: Optional[bool] = None

    engine_version: Optional[float] = None

    last_activity: Optional[datetime] = None

    max_num_results: Optional[int] = None

    metadata: Optional[Metadata] = None

    modified_by: Optional[str] = None

    paused: Optional[bool] = None

    # Reranking configuration.
    reranking: Optional[bool] = None

    reranking_model: Optional[Literal["@cf/baai/bge-reranker-base", ""]] = None

    # Model used for query rewriting (see ``rewrite_query``).
    rewrite_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = None

    rewrite_query: Optional[bool] = None

    score_threshold: Optional[float] = None

    source_params: Optional[SourceParams] = None

    status: Optional[str] = None

    # Summarization configuration.
    summarization: Optional[bool] = None

    summarization_model: Optional[
        Literal[
            "@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            "@cf/meta/llama-3.1-8b-instruct-fast",
            "@cf/meta/llama-3.1-8b-instruct-fp8",
            "@cf/meta/llama-4-scout-17b-16e-instruct",
            "@cf/qwen/qwen3-30b-a3b-fp8",
            "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b",
            "@cf/moonshotai/kimi-k2-instruct",
            "anthropic/claude-3-7-sonnet",
            "anthropic/claude-sonnet-4",
            "anthropic/claude-opus-4",
            "anthropic/claude-3-5-haiku",
            "cerebras/qwen-3-235b-a22b-instruct",
            "cerebras/qwen-3-235b-a22b-thinking",
            "cerebras/llama-3.3-70b",
            "cerebras/llama-4-maverick-17b-128e-instruct",
            "cerebras/llama-4-scout-17b-16e-instruct",
            "cerebras/gpt-oss-120b",
            "google-ai-studio/gemini-2.5-flash",
            "google-ai-studio/gemini-2.5-pro",
            "grok/grok-4",
            "groq/llama-3.3-70b-versatile",
            "groq/llama-3.1-8b-instant",
            "openai/gpt-5",
            "openai/gpt-5-mini",
            "openai/gpt-5-nano",
            "",
        ]
    ] = None

    # Serialized on the wire as ``system_prompt_ai_search``.
    system_prompt_aisearch: Optional[str] = FieldInfo(alias="system_prompt_ai_search", default=None)

    system_prompt_index_summarization: Optional[str] = None

    system_prompt_rewrite_query: Optional[str] = None

    vectorize_active_namespace: Optional[str] = None

View file

@ -0,0 +1,13 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from .job_list_params import JobListParams as JobListParams
from .job_logs_params import JobLogsParams as JobLogsParams
from .item_list_params import ItemListParams as ItemListParams
from .job_get_response import JobGetResponse as JobGetResponse
from .item_get_response import ItemGetResponse as ItemGetResponse
from .job_list_response import JobListResponse as JobListResponse
from .job_logs_response import JobLogsResponse as JobLogsResponse
from .item_list_response import ItemListResponse as ItemListResponse
from .job_create_response import JobCreateResponse as JobCreateResponse

View file

@ -0,0 +1,23 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from datetime import datetime
from typing_extensions import Literal
from ...._models import BaseModel
__all__ = ["ItemGetResponse"]
class ItemGetResponse(BaseModel):
    """A single indexed item tracked by an AI Search instance."""

    id: str

    # Source key identifying the item (e.g. object key or URL — confirm).
    key: str

    # Current processing state of the item.
    status: Literal["queued", "running", "completed", "error", "skipped"]

    # Error detail — presumably populated when ``status`` is "error".
    error: Optional[str] = None

    last_seen_at: Optional[datetime] = None

    # Next scheduled action for this item, if any.
    next_action: Optional[str] = None

View file

@ -0,0 +1,19 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, TypedDict
__all__ = ["ItemListParams"]
class ItemListParams(TypedDict, total=False):
    """Query parameters for listing the items of an AI Search instance."""

    account_id: Required[str]

    # Page number of the paginated result set.
    page: int

    # Number of results per page.
    per_page: int

    # Free-text filter — presumably matched against item keys; confirm.
    search: str

    # Restrict results to items in this processing state.
    status: Literal["queued", "running", "completed", "error", "skipped"]

View file

@ -0,0 +1,23 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from datetime import datetime
from typing_extensions import Literal
from ...._models import BaseModel
__all__ = ["ItemListResponse"]
class ItemListResponse(BaseModel):
    """A single indexed item, as returned by the item list endpoint."""

    id: str

    # Source key identifying the item (e.g. object key or URL — confirm).
    key: str

    # Current processing state of the item.
    status: Literal["queued", "running", "completed", "error", "skipped"]

    # Error detail — presumably populated when ``status`` is "error".
    error: Optional[str] = None

    last_seen_at: Optional[datetime] = None

    # Next scheduled action for this item, if any.
    next_action: Optional[str] = None

View file

@ -0,0 +1,22 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from typing_extensions import Literal
from ...._models import BaseModel
__all__ = ["JobCreateResponse"]
class JobCreateResponse(BaseModel):
    """A sync job, as returned by the job create endpoint."""

    id: str

    # What triggered the job: a user action or the schedule.
    source: Literal["user", "schedule"]

    # Why the job ended, when it has ended.
    end_reason: Optional[str] = None

    ended_at: Optional[str] = None

    last_seen_at: Optional[str] = None

    started_at: Optional[str] = None

View file

@ -0,0 +1,22 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from typing_extensions import Literal
from ...._models import BaseModel
__all__ = ["JobGetResponse"]
class JobGetResponse(BaseModel):
    """A sync job, as returned by the job get endpoint."""

    id: str

    # What triggered the job: a user action or the schedule.
    source: Literal["user", "schedule"]

    # Why the job ended, when it has ended.
    end_reason: Optional[str] = None

    ended_at: Optional[str] = None

    last_seen_at: Optional[str] = None

    started_at: Optional[str] = None

View file

@ -0,0 +1,15 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Required, TypedDict
__all__ = ["JobListParams"]
class JobListParams(TypedDict, total=False):
    """Query parameters for listing the sync jobs of an AI Search instance."""

    account_id: Required[str]

    # Page number of the paginated result set.
    page: int

    # Number of results per page.
    per_page: int

View file

@ -0,0 +1,22 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from typing_extensions import Literal
from ...._models import BaseModel
__all__ = ["JobListResponse"]
class JobListResponse(BaseModel):
    """A sync job, as returned by the job list endpoint."""

    id: str

    # What triggered the job: a user action or the schedule.
    source: Literal["user", "schedule"]

    # Why the job ended, when it has ended.
    end_reason: Optional[str] = None

    ended_at: Optional[str] = None

    last_seen_at: Optional[str] = None

    started_at: Optional[str] = None

View file

@ -0,0 +1,18 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Required, TypedDict
__all__ = ["JobLogsParams"]
class JobLogsParams(TypedDict, total=False):
    """Path/query parameters for fetching the logs of a sync job."""

    account_id: Required[str]

    id: Required[str]
    """Use your AI Search ID."""

    # Page number of the paginated result set.
    page: int

    # Number of results per page.
    per_page: int

View file

@ -0,0 +1,21 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
from typing_extensions import TypeAlias
from ...._models import BaseModel
__all__ = ["JobLogsResponse", "JobLogsResponseItem"]
class JobLogsResponseItem(BaseModel):
    """A single log entry emitted by a sync job."""

    id: int

    # Numeric timestamp — unit (seconds vs. milliseconds) not evident here.
    created_at: float

    message: str

    # Numeric message category/severity; semantics defined server-side.
    message_type: int


# The logs endpoint returns a bare JSON array of log entries.
JobLogsResponse: TypeAlias = List[JobLogsResponseItem]

View file

@ -0,0 +1,19 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Required, TypedDict
__all__ = ["TokenCreateParams"]
class TokenCreateParams(TypedDict, total=False):
    """Body parameters for creating an AI Search service token."""

    account_id: Required[str]

    # Cloudflare API credential pair stored with the token.
    cf_api_id: Required[str]

    cf_api_key: Required[str]

    # Human-readable token name.
    name: Required[str]

    # Mark the token as a legacy token.
    legacy: bool

View file

@ -0,0 +1,36 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from datetime import datetime
from ..._models import BaseModel
__all__ = ["TokenCreateResponse"]
class TokenCreateResponse(BaseModel):
    """An AI Search service token, as returned by the token create endpoint."""

    id: str

    account_id: str

    account_tag: str

    # Cloudflare API credential pair stored with the token.
    cf_api_id: str

    cf_api_key: str

    created_at: datetime

    modified_at: datetime

    # Human-readable token name.
    name: str

    created_by: Optional[str] = None

    enabled: Optional[bool] = None

    # Whether this is a legacy token.
    legacy: Optional[bool] = None

    modified_by: Optional[str] = None

    synced_at: Optional[datetime] = None

View file

@ -0,0 +1,36 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from datetime import datetime
from ..._models import BaseModel
__all__ = ["TokenDeleteResponse"]
class TokenDeleteResponse(BaseModel):
    """An AI Search service token, as returned by the token delete endpoint."""

    id: str

    account_id: str

    account_tag: str

    # Cloudflare API credential pair stored with the token.
    cf_api_id: str

    cf_api_key: str

    created_at: datetime

    modified_at: datetime

    # Human-readable token name.
    name: str

    created_by: Optional[str] = None

    enabled: Optional[bool] = None

    # Whether this is a legacy token.
    legacy: Optional[bool] = None

    modified_by: Optional[str] = None

    synced_at: Optional[datetime] = None

View file

@ -0,0 +1,15 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Required, TypedDict
__all__ = ["TokenListParams"]
class TokenListParams(TypedDict, total=False):
    """Query parameters for listing AI Search service tokens."""

    account_id: Required[str]

    # Page number of the paginated result set.
    page: int

    # Number of results per page.
    per_page: int

View file

@ -0,0 +1,36 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from datetime import datetime
from ..._models import BaseModel
__all__ = ["TokenListResponse"]
class TokenListResponse(BaseModel):
    """An AI Search service token, as returned by the token list endpoint."""

    id: str

    account_id: str

    account_tag: str

    # Cloudflare API credential pair stored with the token.
    cf_api_id: str

    cf_api_key: str

    created_at: datetime

    modified_at: datetime

    # Human-readable token name.
    name: str

    created_by: Optional[str] = None

    enabled: Optional[bool] = None

    # Whether this is a legacy token.
    legacy: Optional[bool] = None

    modified_by: Optional[str] = None

    synced_at: Optional[datetime] = None

View file

@ -0,0 +1,36 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from datetime import datetime
from ..._models import BaseModel
__all__ = ["TokenReadResponse"]
class TokenReadResponse(BaseModel):
    """An AI Search service token, as returned by the token read endpoint."""

    id: str

    account_id: str

    account_tag: str

    # Cloudflare API credential pair stored with the token.
    cf_api_id: str

    cf_api_key: str

    created_at: datetime

    modified_at: datetime

    # Human-readable token name.
    name: str

    created_by: Optional[str] = None

    enabled: Optional[bool] = None

    # Whether this is a legacy token.
    legacy: Optional[bool] = None

    modified_by: Optional[str] = None

    synced_at: Optional[datetime] = None

View file

@ -0,0 +1,36 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from datetime import datetime
from ..._models import BaseModel
__all__ = ["TokenUpdateResponse"]
class TokenUpdateResponse(BaseModel):
    """An AI Search service token, as returned by the token update endpoint."""

    id: str

    account_id: str

    account_tag: str

    # Cloudflare API credential pair stored with the token.
    cf_api_id: str

    cf_api_key: str

    created_at: datetime

    modified_at: datetime

    # Human-readable token name.
    name: str

    created_by: Optional[str] = None

    enabled: Optional[bool] = None

    # Whether this is a legacy token.
    legacy: Optional[bool] = None

    modified_by: Optional[str] = None

    synced_at: Optional[datetime] = None

View file

@ -0,0 +1 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

View file

@ -0,0 +1 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

View file

@ -0,0 +1,265 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
import os
from typing import Any, cast
import pytest
from cloudflare import Cloudflare, AsyncCloudflare
from tests.utils import assert_matches_type
from cloudflare.pagination import SyncV4PagePaginationArray, AsyncV4PagePaginationArray
from cloudflare.types.aisearch.instances import ItemGetResponse, ItemListResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
class TestItems:
    """Contract tests for the sync ``aisearch.instances.items`` resource.

    Each test hits a Prism mock server (``TEST_API_BASE_URL``) and checks the
    parsed response type; path-param tests verify empty-segment validation.
    """

    # Run every test against both the loose and the strict client.
    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    def test_method_list(self, client: Cloudflare) -> None:
        """list() with required args returns a paginated array of items."""
        item = client.aisearch.instances.items.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(SyncV4PagePaginationArray[ItemListResponse], item, path=["response"])

    @parametrize
    def test_method_list_with_all_params(self, client: Cloudflare) -> None:
        """list() accepts every optional filter/pagination parameter."""
        item = client.aisearch.instances.items.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            page=1,
            per_page=0,
            search="search",
            status="queued",
        )
        assert_matches_type(SyncV4PagePaginationArray[ItemListResponse], item, path=["response"])

    @parametrize
    def test_raw_response_list(self, client: Cloudflare) -> None:
        """with_raw_response.list() exposes the HTTP response and still parses."""
        response = client.aisearch.instances.items.with_raw_response.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        item = response.parse()
        assert_matches_type(SyncV4PagePaginationArray[ItemListResponse], item, path=["response"])

    @parametrize
    def test_streaming_response_list(self, client: Cloudflare) -> None:
        """with_streaming_response.list() parses inside the context manager."""
        with client.aisearch.instances.items.with_streaming_response.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            item = response.parse()
            assert_matches_type(SyncV4PagePaginationArray[ItemListResponse], item, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_list(self, client: Cloudflare) -> None:
        """list() rejects empty path segments before issuing a request."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.items.with_raw_response.list(
                id="my-ai-search",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.instances.items.with_raw_response.list(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )

    @parametrize
    def test_method_get(self, client: Cloudflare) -> None:
        """get() returns a single item."""
        item = client.aisearch.instances.items.get(
            item_id="item_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )
        assert_matches_type(ItemGetResponse, item, path=["response"])

    @parametrize
    def test_raw_response_get(self, client: Cloudflare) -> None:
        """with_raw_response.get() exposes the HTTP response and still parses."""
        response = client.aisearch.instances.items.with_raw_response.get(
            item_id="item_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        item = response.parse()
        assert_matches_type(ItemGetResponse, item, path=["response"])

    @parametrize
    def test_streaming_response_get(self, client: Cloudflare) -> None:
        """with_streaming_response.get() parses inside the context manager."""
        with client.aisearch.instances.items.with_streaming_response.get(
            item_id="item_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            item = response.parse()
            assert_matches_type(ItemGetResponse, item, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_get(self, client: Cloudflare) -> None:
        """get() rejects empty path segments before issuing a request."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.items.with_raw_response.get(
                item_id="item_id",
                account_id="",
                id="my-ai-search",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.instances.items.with_raw_response.get(
                item_id="item_id",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `item_id` but received ''"):
            client.aisearch.instances.items.with_raw_response.get(
                item_id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="my-ai-search",
            )
class TestAsyncItems:
    """Async twin of ``TestItems`` for ``aisearch.instances.items``.

    Same assertions as the sync suite, run against the async client
    (including the aiohttp-backed variant).
    """

    # Run against loose, strict, and aiohttp-backed async clients.
    parametrize = pytest.mark.parametrize(
        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
    )

    @parametrize
    async def test_method_list(self, async_client: AsyncCloudflare) -> None:
        """list() with required args returns a paginated array of items."""
        item = await async_client.aisearch.instances.items.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(AsyncV4PagePaginationArray[ItemListResponse], item, path=["response"])

    @parametrize
    async def test_method_list_with_all_params(self, async_client: AsyncCloudflare) -> None:
        """list() accepts every optional filter/pagination parameter."""
        item = await async_client.aisearch.instances.items.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            page=1,
            per_page=0,
            search="search",
            status="queued",
        )
        assert_matches_type(AsyncV4PagePaginationArray[ItemListResponse], item, path=["response"])

    @parametrize
    async def test_raw_response_list(self, async_client: AsyncCloudflare) -> None:
        """with_raw_response.list() exposes the HTTP response and still parses."""
        response = await async_client.aisearch.instances.items.with_raw_response.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        item = await response.parse()
        assert_matches_type(AsyncV4PagePaginationArray[ItemListResponse], item, path=["response"])

    @parametrize
    async def test_streaming_response_list(self, async_client: AsyncCloudflare) -> None:
        """with_streaming_response.list() parses inside the context manager."""
        async with async_client.aisearch.instances.items.with_streaming_response.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            item = await response.parse()
            assert_matches_type(AsyncV4PagePaginationArray[ItemListResponse], item, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_path_params_list(self, async_client: AsyncCloudflare) -> None:
        """list() rejects empty path segments before issuing a request."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.instances.items.with_raw_response.list(
                id="my-ai-search",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.instances.items.with_raw_response.list(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )

    @parametrize
    async def test_method_get(self, async_client: AsyncCloudflare) -> None:
        """get() returns a single item."""
        item = await async_client.aisearch.instances.items.get(
            item_id="item_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )
        assert_matches_type(ItemGetResponse, item, path=["response"])

    @parametrize
    async def test_raw_response_get(self, async_client: AsyncCloudflare) -> None:
        """with_raw_response.get() exposes the HTTP response and still parses."""
        response = await async_client.aisearch.instances.items.with_raw_response.get(
            item_id="item_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        item = await response.parse()
        assert_matches_type(ItemGetResponse, item, path=["response"])

    @parametrize
    async def test_streaming_response_get(self, async_client: AsyncCloudflare) -> None:
        """with_streaming_response.get() parses inside the context manager."""
        async with async_client.aisearch.instances.items.with_streaming_response.get(
            item_id="item_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            item = await response.parse()
            assert_matches_type(ItemGetResponse, item, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_path_params_get(self, async_client: AsyncCloudflare) -> None:
        """get() rejects empty path segments before issuing a request."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.instances.items.with_raw_response.get(
                item_id="item_id",
                account_id="",
                id="my-ai-search",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.instances.items.with_raw_response.get(
                item_id="item_id",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `item_id` but received ''"):
            await async_client.aisearch.instances.items.with_raw_response.get(
                item_id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="my-ai-search",
            )

View file

@ -0,0 +1,504 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
import os
from typing import Any, cast
import pytest
from cloudflare import Cloudflare, AsyncCloudflare
from tests.utils import assert_matches_type
from cloudflare.pagination import SyncV4PagePaginationArray, AsyncV4PagePaginationArray
from cloudflare.types.aisearch.instances import (
JobGetResponse,
JobListResponse,
JobLogsResponse,
JobCreateResponse,
)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
class TestJobs:
    """Contract tests for the sync ``aisearch.instances.jobs`` resource.

    Covers create, list, get, and logs against a Prism mock server
    (``TEST_API_BASE_URL``); path-param tests verify empty-segment validation.
    """

    # Run every test against both the loose and the strict client.
    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    def test_method_create(self, client: Cloudflare) -> None:
        """create() starts a sync job and returns its descriptor."""
        job = client.aisearch.instances.jobs.create(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(JobCreateResponse, job, path=["response"])

    @parametrize
    def test_raw_response_create(self, client: Cloudflare) -> None:
        """with_raw_response.create() exposes the HTTP response and still parses."""
        response = client.aisearch.instances.jobs.with_raw_response.create(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        job = response.parse()
        assert_matches_type(JobCreateResponse, job, path=["response"])

    @parametrize
    def test_streaming_response_create(self, client: Cloudflare) -> None:
        """with_streaming_response.create() parses inside the context manager."""
        with client.aisearch.instances.jobs.with_streaming_response.create(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            job = response.parse()
            assert_matches_type(JobCreateResponse, job, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_create(self, client: Cloudflare) -> None:
        """create() rejects empty path segments before issuing a request."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.jobs.with_raw_response.create(
                id="my-ai-search",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.instances.jobs.with_raw_response.create(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )

    @parametrize
    def test_method_list(self, client: Cloudflare) -> None:
        """list() with required args returns a paginated array of jobs."""
        job = client.aisearch.instances.jobs.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(SyncV4PagePaginationArray[JobListResponse], job, path=["response"])

    @parametrize
    def test_method_list_with_all_params(self, client: Cloudflare) -> None:
        """list() accepts every optional pagination parameter."""
        job = client.aisearch.instances.jobs.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            page=1,
            per_page=0,
        )
        assert_matches_type(SyncV4PagePaginationArray[JobListResponse], job, path=["response"])

    @parametrize
    def test_raw_response_list(self, client: Cloudflare) -> None:
        """with_raw_response.list() exposes the HTTP response and still parses."""
        response = client.aisearch.instances.jobs.with_raw_response.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        job = response.parse()
        assert_matches_type(SyncV4PagePaginationArray[JobListResponse], job, path=["response"])

    @parametrize
    def test_streaming_response_list(self, client: Cloudflare) -> None:
        """with_streaming_response.list() parses inside the context manager."""
        with client.aisearch.instances.jobs.with_streaming_response.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            job = response.parse()
            assert_matches_type(SyncV4PagePaginationArray[JobListResponse], job, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_list(self, client: Cloudflare) -> None:
        """list() rejects empty path segments before issuing a request."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.jobs.with_raw_response.list(
                id="my-ai-search",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.instances.jobs.with_raw_response.list(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )

    @parametrize
    def test_method_get(self, client: Cloudflare) -> None:
        """get() returns a single job."""
        job = client.aisearch.instances.jobs.get(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )
        assert_matches_type(JobGetResponse, job, path=["response"])

    @parametrize
    def test_raw_response_get(self, client: Cloudflare) -> None:
        """with_raw_response.get() exposes the HTTP response and still parses."""
        response = client.aisearch.instances.jobs.with_raw_response.get(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        job = response.parse()
        assert_matches_type(JobGetResponse, job, path=["response"])

    @parametrize
    def test_streaming_response_get(self, client: Cloudflare) -> None:
        """with_streaming_response.get() parses inside the context manager."""
        with client.aisearch.instances.jobs.with_streaming_response.get(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            job = response.parse()
            assert_matches_type(JobGetResponse, job, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_get(self, client: Cloudflare) -> None:
        """get() rejects empty path segments before issuing a request."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.jobs.with_raw_response.get(
                job_id="job_id",
                account_id="",
                id="my-ai-search",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.instances.jobs.with_raw_response.get(
                job_id="job_id",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `job_id` but received ''"):
            client.aisearch.instances.jobs.with_raw_response.get(
                job_id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="my-ai-search",
            )

    @parametrize
    def test_method_logs(self, client: Cloudflare) -> None:
        """logs() returns the log entries of a job."""
        job = client.aisearch.instances.jobs.logs(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )
        assert_matches_type(JobLogsResponse, job, path=["response"])

    @parametrize
    def test_method_logs_with_all_params(self, client: Cloudflare) -> None:
        """logs() accepts every optional pagination parameter."""
        job = client.aisearch.instances.jobs.logs(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
            page=1,
            per_page=0,
        )
        assert_matches_type(JobLogsResponse, job, path=["response"])

    @parametrize
    def test_raw_response_logs(self, client: Cloudflare) -> None:
        """with_raw_response.logs() exposes the HTTP response and still parses."""
        response = client.aisearch.instances.jobs.with_raw_response.logs(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        job = response.parse()
        assert_matches_type(JobLogsResponse, job, path=["response"])

    @parametrize
    def test_streaming_response_logs(self, client: Cloudflare) -> None:
        """with_streaming_response.logs() parses inside the context manager."""
        with client.aisearch.instances.jobs.with_streaming_response.logs(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            job = response.parse()
            assert_matches_type(JobLogsResponse, job, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_logs(self, client: Cloudflare) -> None:
        """logs() rejects empty path segments before issuing a request."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.jobs.with_raw_response.logs(
                job_id="job_id",
                account_id="",
                id="my-ai-search",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.instances.jobs.with_raw_response.logs(
                job_id="job_id",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `job_id` but received ''"):
            client.aisearch.instances.jobs.with_raw_response.logs(
                job_id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="my-ai-search",
            )
class TestAsyncJobs:
    """Async-client mirror of the sync job tests: create, list, get, and logs
    endpoints, each exercised via plain, raw-response, streaming, and
    path-parameter-validation variants."""
    # Parametrized over loose/strict response validation plus the aiohttp transport.
    parametrize = pytest.mark.parametrize(
        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
    )
    @parametrize
    async def test_method_create(self, async_client: AsyncCloudflare) -> None:
        """Creating a job returns a parsed JobCreateResponse."""
        job = await async_client.aisearch.instances.jobs.create(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(JobCreateResponse, job, path=["response"])
    @parametrize
    async def test_raw_response_create(self, async_client: AsyncCloudflare) -> None:
        """Raw-response create exposes HTTP details and still parses the body."""
        response = await async_client.aisearch.instances.jobs.with_raw_response.create(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        job = await response.parse()
        assert_matches_type(JobCreateResponse, job, path=["response"])
    @parametrize
    async def test_streaming_response_create(self, async_client: AsyncCloudflare) -> None:
        """Streaming create stays open inside the context and closes on exit."""
        async with async_client.aisearch.instances.jobs.with_streaming_response.create(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
            job = await response.parse()
            assert_matches_type(JobCreateResponse, job, path=["response"])
        assert cast(Any, response.is_closed) is True
    @parametrize
    async def test_path_params_create(self, async_client: AsyncCloudflare) -> None:
        """Empty path params for create are rejected client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.instances.jobs.with_raw_response.create(
                id="my-ai-search",
                account_id="",
            )
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.instances.jobs.with_raw_response.create(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )
    @parametrize
    async def test_method_list(self, async_client: AsyncCloudflare) -> None:
        """Listing jobs returns an async V4 paginated array of JobListResponse."""
        job = await async_client.aisearch.instances.jobs.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(AsyncV4PagePaginationArray[JobListResponse], job, path=["response"])
    @parametrize
    async def test_method_list_with_all_params(self, async_client: AsyncCloudflare) -> None:
        """Listing with all optional pagination params still parses correctly."""
        job = await async_client.aisearch.instances.jobs.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            page=1,
            per_page=0,
        )
        assert_matches_type(AsyncV4PagePaginationArray[JobListResponse], job, path=["response"])
    @parametrize
    async def test_raw_response_list(self, async_client: AsyncCloudflare) -> None:
        """Raw-response list exposes HTTP details and still parses the body."""
        response = await async_client.aisearch.instances.jobs.with_raw_response.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        job = await response.parse()
        assert_matches_type(AsyncV4PagePaginationArray[JobListResponse], job, path=["response"])
    @parametrize
    async def test_streaming_response_list(self, async_client: AsyncCloudflare) -> None:
        """Streaming list stays open inside the context and closes on exit."""
        async with async_client.aisearch.instances.jobs.with_streaming_response.list(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
            job = await response.parse()
            assert_matches_type(AsyncV4PagePaginationArray[JobListResponse], job, path=["response"])
        assert cast(Any, response.is_closed) is True
    @parametrize
    async def test_path_params_list(self, async_client: AsyncCloudflare) -> None:
        """Empty path params for list are rejected client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.instances.jobs.with_raw_response.list(
                id="my-ai-search",
                account_id="",
            )
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.instances.jobs.with_raw_response.list(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )
    @parametrize
    async def test_method_get(self, async_client: AsyncCloudflare) -> None:
        """Fetching a single job returns a parsed JobGetResponse."""
        job = await async_client.aisearch.instances.jobs.get(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )
        assert_matches_type(JobGetResponse, job, path=["response"])
    @parametrize
    async def test_raw_response_get(self, async_client: AsyncCloudflare) -> None:
        """Raw-response get exposes HTTP details and still parses the body."""
        response = await async_client.aisearch.instances.jobs.with_raw_response.get(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        job = await response.parse()
        assert_matches_type(JobGetResponse, job, path=["response"])
    @parametrize
    async def test_streaming_response_get(self, async_client: AsyncCloudflare) -> None:
        """Streaming get stays open inside the context and closes on exit."""
        async with async_client.aisearch.instances.jobs.with_streaming_response.get(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
            job = await response.parse()
            assert_matches_type(JobGetResponse, job, path=["response"])
        assert cast(Any, response.is_closed) is True
    @parametrize
    async def test_path_params_get(self, async_client: AsyncCloudflare) -> None:
        """Empty path params for get are rejected client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.instances.jobs.with_raw_response.get(
                job_id="job_id",
                account_id="",
                id="my-ai-search",
            )
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.instances.jobs.with_raw_response.get(
                job_id="job_id",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="",
            )
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `job_id` but received ''"):
            await async_client.aisearch.instances.jobs.with_raw_response.get(
                job_id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="my-ai-search",
            )
    @parametrize
    async def test_method_logs(self, async_client: AsyncCloudflare) -> None:
        """Fetching job logs returns a parsed JobLogsResponse."""
        job = await async_client.aisearch.instances.jobs.logs(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )
        assert_matches_type(JobLogsResponse, job, path=["response"])
    @parametrize
    async def test_method_logs_with_all_params(self, async_client: AsyncCloudflare) -> None:
        """Fetching logs with all optional pagination params still parses correctly."""
        job = await async_client.aisearch.instances.jobs.logs(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
            page=1,
            per_page=0,
        )
        assert_matches_type(JobLogsResponse, job, path=["response"])
    @parametrize
    async def test_raw_response_logs(self, async_client: AsyncCloudflare) -> None:
        """Raw-response logs exposes HTTP details and still parses the body."""
        response = await async_client.aisearch.instances.jobs.with_raw_response.logs(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        )
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        job = await response.parse()
        assert_matches_type(JobLogsResponse, job, path=["response"])
    @parametrize
    async def test_streaming_response_logs(self, async_client: AsyncCloudflare) -> None:
        """Streaming logs stays open inside the context and closes on exit."""
        async with async_client.aisearch.instances.jobs.with_streaming_response.logs(
            job_id="job_id",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
            job = await response.parse()
            assert_matches_type(JobLogsResponse, job, path=["response"])
        assert cast(Any, response.is_closed) is True
    @parametrize
    async def test_path_params_logs(self, async_client: AsyncCloudflare) -> None:
        """Empty path params for logs are rejected client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.instances.jobs.with_raw_response.logs(
                job_id="job_id",
                account_id="",
                id="my-ai-search",
            )
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.instances.jobs.with_raw_response.logs(
                job_id="job_id",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="",
            )
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `job_id` but received ''"):
            await async_client.aisearch.instances.jobs.with_raw_response.logs(
                job_id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
                id="my-ai-search",
            )

View file

@ -0,0 +1,816 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
import os
from typing import Any, cast
import pytest
from cloudflare import Cloudflare, AsyncCloudflare
from tests.utils import assert_matches_type
from cloudflare.pagination import SyncV4PagePaginationArray, AsyncV4PagePaginationArray
from cloudflare.types.aisearch import (
InstanceListResponse,
InstanceReadResponse,
InstanceStatsResponse,
InstanceCreateResponse,
InstanceDeleteResponse,
InstanceUpdateResponse,
)
# Mock-server URL used by all tests in this module; override via TEST_API_BASE_URL.
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
class TestInstances:
    """Sync-client tests for AI Search instance endpoints (create, update, list,
    delete, read, stats), each exercised via plain, raw-response, streaming, and
    path-parameter-validation variants."""
    # Parametrized over loose/strict response validation.
    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
    @parametrize
    def test_method_create(self, client: Cloudflare) -> None:
        """Creating an instance with only required fields returns InstanceCreateResponse."""
        instance = client.aisearch.instances.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
            source="source",
            token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            type="r2",
        )
        assert_matches_type(InstanceCreateResponse, instance, path=["response"])
    @parametrize
    def test_method_create_with_all_params(self, client: Cloudflare) -> None:
        """Creating with every optional field (models, chunking, crawler config) still parses."""
        instance = client.aisearch.instances.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
            source="source",
            token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            type="r2",
            ai_gateway_id="ai_gateway_id",
            aisearch_model="@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            chunk=True,
            chunk_overlap=0,
            chunk_size=64,
            embedding_model="@cf/baai/bge-m3",
            max_num_results=1,
            metadata={
                "created_from_aisearch_wizard": True,
                "worker_domain": "worker_domain",
            },
            reranking=True,
            reranking_model="@cf/baai/bge-reranker-base",
            rewrite_model="@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            rewrite_query=True,
            score_threshold=0,
            source_params={
                "exclude_items": ["/admin/*", "/private/**", "*\\temp\\*"],
                "include_items": ["/blog/*", "/docs/**/*.html", "*\\blog\\*.html"],
                "prefix": "prefix",
                "r2_jurisdiction": "r2_jurisdiction",
                "web_crawler": {
                    "parse_options": {
                        "include_headers": {"foo": "string"},
                        "include_images": True,
                        "use_browser_rendering": True,
                    },
                    "parse_type": "sitemap",
                    "store_options": {
                        "storage_id": "storage_id",
                        "r2_jurisdiction": "r2_jurisdiction",
                        "storage_type": "r2",
                    },
                },
            },
        )
        assert_matches_type(InstanceCreateResponse, instance, path=["response"])
    @parametrize
    def test_raw_response_create(self, client: Cloudflare) -> None:
        """Raw-response create exposes HTTP details and still parses the body."""
        response = client.aisearch.instances.with_raw_response.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
            source="source",
            token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            type="r2",
        )
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        instance = response.parse()
        assert_matches_type(InstanceCreateResponse, instance, path=["response"])
    @parametrize
    def test_streaming_response_create(self, client: Cloudflare) -> None:
        """Streaming create stays open inside the context and closes on exit."""
        with client.aisearch.instances.with_streaming_response.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            id="my-ai-search",
            source="source",
            token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            type="r2",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
            instance = response.parse()
            assert_matches_type(InstanceCreateResponse, instance, path=["response"])
        assert cast(Any, response.is_closed) is True
    @parametrize
    def test_path_params_create(self, client: Cloudflare) -> None:
        """An empty account_id for create is rejected client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.with_raw_response.create(
                account_id="",
                id="my-ai-search",
                source="source",
                token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
                type="r2",
            )
    @parametrize
    def test_method_update(self, client: Cloudflare) -> None:
        """Updating with only required fields returns InstanceUpdateResponse."""
        instance = client.aisearch.instances.update(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(InstanceUpdateResponse, instance, path=["response"])
    @parametrize
    def test_method_update_with_all_params(self, client: Cloudflare) -> None:
        """Updating with every optional field (cache, prompts, crawler config) still parses."""
        instance = client.aisearch.instances.update(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            ai_gateway_id="ai_gateway_id",
            aisearch_model="@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            cache=True,
            cache_threshold="super_strict_match",
            chunk=True,
            chunk_overlap=0,
            chunk_size=64,
            embedding_model="@cf/baai/bge-m3",
            max_num_results=1,
            metadata={
                "created_from_aisearch_wizard": True,
                "worker_domain": "worker_domain",
            },
            paused=True,
            reranking=True,
            reranking_model="@cf/baai/bge-reranker-base",
            rewrite_model="@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            rewrite_query=True,
            score_threshold=0,
            source_params={
                "exclude_items": ["/admin/*", "/private/**", "*\\temp\\*"],
                "include_items": ["/blog/*", "/docs/**/*.html", "*\\blog\\*.html"],
                "prefix": "prefix",
                "r2_jurisdiction": "r2_jurisdiction",
                "web_crawler": {
                    "parse_options": {
                        "include_headers": {"foo": "string"},
                        "include_images": True,
                        "use_browser_rendering": True,
                    },
                    "parse_type": "sitemap",
                    "store_options": {
                        "storage_id": "storage_id",
                        "r2_jurisdiction": "r2_jurisdiction",
                        "storage_type": "r2",
                    },
                },
            },
            summarization=True,
            summarization_model="@cf/meta/llama-3.3-70b-instruct-fp8-fast",
            system_prompt_aisearch="system_prompt_ai_search",
            system_prompt_index_summarization="system_prompt_index_summarization",
            system_prompt_rewrite_query="system_prompt_rewrite_query",
            token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
        )
        assert_matches_type(InstanceUpdateResponse, instance, path=["response"])
    @parametrize
    def test_raw_response_update(self, client: Cloudflare) -> None:
        """Raw-response update exposes HTTP details and still parses the body."""
        response = client.aisearch.instances.with_raw_response.update(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        instance = response.parse()
        assert_matches_type(InstanceUpdateResponse, instance, path=["response"])
    @parametrize
    def test_streaming_response_update(self, client: Cloudflare) -> None:
        """Streaming update stays open inside the context and closes on exit."""
        with client.aisearch.instances.with_streaming_response.update(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
            instance = response.parse()
            assert_matches_type(InstanceUpdateResponse, instance, path=["response"])
        assert cast(Any, response.is_closed) is True
    @parametrize
    def test_path_params_update(self, client: Cloudflare) -> None:
        """Empty path params for update are rejected client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.with_raw_response.update(
                id="my-ai-search",
                account_id="",
            )
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.instances.with_raw_response.update(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )
    @parametrize
    def test_method_list(self, client: Cloudflare) -> None:
        """Listing instances returns a V4 paginated array of InstanceListResponse."""
        instance = client.aisearch.instances.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(SyncV4PagePaginationArray[InstanceListResponse], instance, path=["response"])
    @parametrize
    def test_method_list_with_all_params(self, client: Cloudflare) -> None:
        """Listing with pagination and search filters still parses correctly."""
        instance = client.aisearch.instances.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            page=1,
            per_page=1,
            search="search",
        )
        assert_matches_type(SyncV4PagePaginationArray[InstanceListResponse], instance, path=["response"])
    @parametrize
    def test_raw_response_list(self, client: Cloudflare) -> None:
        """Raw-response list exposes HTTP details and still parses the body."""
        response = client.aisearch.instances.with_raw_response.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        instance = response.parse()
        assert_matches_type(SyncV4PagePaginationArray[InstanceListResponse], instance, path=["response"])
    @parametrize
    def test_streaming_response_list(self, client: Cloudflare) -> None:
        """Streaming list stays open inside the context and closes on exit."""
        with client.aisearch.instances.with_streaming_response.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
            instance = response.parse()
            assert_matches_type(SyncV4PagePaginationArray[InstanceListResponse], instance, path=["response"])
        assert cast(Any, response.is_closed) is True
    @parametrize
    def test_path_params_list(self, client: Cloudflare) -> None:
        """An empty account_id for list is rejected client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.with_raw_response.list(
                account_id="",
            )
    @parametrize
    def test_method_delete(self, client: Cloudflare) -> None:
        """Deleting an instance returns a parsed InstanceDeleteResponse."""
        instance = client.aisearch.instances.delete(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(InstanceDeleteResponse, instance, path=["response"])
    @parametrize
    def test_raw_response_delete(self, client: Cloudflare) -> None:
        """Raw-response delete exposes HTTP details and still parses the body."""
        response = client.aisearch.instances.with_raw_response.delete(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        instance = response.parse()
        assert_matches_type(InstanceDeleteResponse, instance, path=["response"])
    @parametrize
    def test_streaming_response_delete(self, client: Cloudflare) -> None:
        """Streaming delete stays open inside the context and closes on exit."""
        with client.aisearch.instances.with_streaming_response.delete(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
            instance = response.parse()
            assert_matches_type(InstanceDeleteResponse, instance, path=["response"])
        assert cast(Any, response.is_closed) is True
    @parametrize
    def test_path_params_delete(self, client: Cloudflare) -> None:
        """Empty path params for delete are rejected client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.with_raw_response.delete(
                id="my-ai-search",
                account_id="",
            )
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.instances.with_raw_response.delete(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )
    @parametrize
    def test_method_read(self, client: Cloudflare) -> None:
        """Reading a single instance returns a parsed InstanceReadResponse."""
        instance = client.aisearch.instances.read(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(InstanceReadResponse, instance, path=["response"])
    @parametrize
    def test_raw_response_read(self, client: Cloudflare) -> None:
        """Raw-response read exposes HTTP details and still parses the body."""
        response = client.aisearch.instances.with_raw_response.read(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        instance = response.parse()
        assert_matches_type(InstanceReadResponse, instance, path=["response"])
    @parametrize
    def test_streaming_response_read(self, client: Cloudflare) -> None:
        """Streaming read stays open inside the context and closes on exit."""
        with client.aisearch.instances.with_streaming_response.read(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
            instance = response.parse()
            assert_matches_type(InstanceReadResponse, instance, path=["response"])
        assert cast(Any, response.is_closed) is True
    @parametrize
    def test_path_params_read(self, client: Cloudflare) -> None:
        """Empty path params for read are rejected client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.with_raw_response.read(
                id="my-ai-search",
                account_id="",
            )
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.instances.with_raw_response.read(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )
    @parametrize
    def test_method_stats(self, client: Cloudflare) -> None:
        """Fetching instance stats returns a parsed InstanceStatsResponse."""
        instance = client.aisearch.instances.stats(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(InstanceStatsResponse, instance, path=["response"])
    @parametrize
    def test_raw_response_stats(self, client: Cloudflare) -> None:
        """Raw-response stats exposes HTTP details and still parses the body."""
        response = client.aisearch.instances.with_raw_response.stats(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        instance = response.parse()
        assert_matches_type(InstanceStatsResponse, instance, path=["response"])
    @parametrize
    def test_streaming_response_stats(self, client: Cloudflare) -> None:
        """Streaming stats stays open inside the context and closes on exit."""
        with client.aisearch.instances.with_streaming_response.stats(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
            instance = response.parse()
            assert_matches_type(InstanceStatsResponse, instance, path=["response"])
        assert cast(Any, response.is_closed) is True
    @parametrize
    def test_path_params_stats(self, client: Cloudflare) -> None:
        """Empty path params for stats are rejected client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.instances.with_raw_response.stats(
                id="my-ai-search",
                account_id="",
            )
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.instances.with_raw_response.stats(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )
class TestAsyncInstances:
parametrize = pytest.mark.parametrize(
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
@parametrize
async def test_method_create(self, async_client: AsyncCloudflare) -> None:
instance = await async_client.aisearch.instances.create(
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
id="my-ai-search",
source="source",
token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
type="r2",
)
assert_matches_type(InstanceCreateResponse, instance, path=["response"])
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncCloudflare) -> None:
instance = await async_client.aisearch.instances.create(
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
id="my-ai-search",
source="source",
token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
type="r2",
ai_gateway_id="ai_gateway_id",
aisearch_model="@cf/meta/llama-3.3-70b-instruct-fp8-fast",
chunk=True,
chunk_overlap=0,
chunk_size=64,
embedding_model="@cf/baai/bge-m3",
max_num_results=1,
metadata={
"created_from_aisearch_wizard": True,
"worker_domain": "worker_domain",
},
reranking=True,
reranking_model="@cf/baai/bge-reranker-base",
rewrite_model="@cf/meta/llama-3.3-70b-instruct-fp8-fast",
rewrite_query=True,
score_threshold=0,
source_params={
"exclude_items": ["/admin/*", "/private/**", "*\\temp\\*"],
"include_items": ["/blog/*", "/docs/**/*.html", "*\\blog\\*.html"],
"prefix": "prefix",
"r2_jurisdiction": "r2_jurisdiction",
"web_crawler": {
"parse_options": {
"include_headers": {"foo": "string"},
"include_images": True,
"use_browser_rendering": True,
},
"parse_type": "sitemap",
"store_options": {
"storage_id": "storage_id",
"r2_jurisdiction": "r2_jurisdiction",
"storage_type": "r2",
},
},
},
)
assert_matches_type(InstanceCreateResponse, instance, path=["response"])
@parametrize
async def test_raw_response_create(self, async_client: AsyncCloudflare) -> None:
response = await async_client.aisearch.instances.with_raw_response.create(
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
id="my-ai-search",
source="source",
token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
type="r2",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
instance = await response.parse()
assert_matches_type(InstanceCreateResponse, instance, path=["response"])
@parametrize
async def test_streaming_response_create(self, async_client: AsyncCloudflare) -> None:
async with async_client.aisearch.instances.with_streaming_response.create(
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
id="my-ai-search",
source="source",
token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
type="r2",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
instance = await response.parse()
assert_matches_type(InstanceCreateResponse, instance, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
async def test_path_params_create(self, async_client: AsyncCloudflare) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
await async_client.aisearch.instances.with_raw_response.create(
account_id="",
id="my-ai-search",
source="source",
token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
type="r2",
)
@parametrize
async def test_method_update(self, async_client: AsyncCloudflare) -> None:
instance = await async_client.aisearch.instances.update(
id="my-ai-search",
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
)
assert_matches_type(InstanceUpdateResponse, instance, path=["response"])
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncCloudflare) -> None:
instance = await async_client.aisearch.instances.update(
id="my-ai-search",
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
ai_gateway_id="ai_gateway_id",
aisearch_model="@cf/meta/llama-3.3-70b-instruct-fp8-fast",
cache=True,
cache_threshold="super_strict_match",
chunk=True,
chunk_overlap=0,
chunk_size=64,
embedding_model="@cf/baai/bge-m3",
max_num_results=1,
metadata={
"created_from_aisearch_wizard": True,
"worker_domain": "worker_domain",
},
paused=True,
reranking=True,
reranking_model="@cf/baai/bge-reranker-base",
rewrite_model="@cf/meta/llama-3.3-70b-instruct-fp8-fast",
rewrite_query=True,
score_threshold=0,
source_params={
"exclude_items": ["/admin/*", "/private/**", "*\\temp\\*"],
"include_items": ["/blog/*", "/docs/**/*.html", "*\\blog\\*.html"],
"prefix": "prefix",
"r2_jurisdiction": "r2_jurisdiction",
"web_crawler": {
"parse_options": {
"include_headers": {"foo": "string"},
"include_images": True,
"use_browser_rendering": True,
},
"parse_type": "sitemap",
"store_options": {
"storage_id": "storage_id",
"r2_jurisdiction": "r2_jurisdiction",
"storage_type": "r2",
},
},
},
summarization=True,
summarization_model="@cf/meta/llama-3.3-70b-instruct-fp8-fast",
system_prompt_aisearch="system_prompt_ai_search",
system_prompt_index_summarization="system_prompt_index_summarization",
system_prompt_rewrite_query="system_prompt_rewrite_query",
token_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
assert_matches_type(InstanceUpdateResponse, instance, path=["response"])
@parametrize
async def test_raw_response_update(self, async_client: AsyncCloudflare) -> None:
response = await async_client.aisearch.instances.with_raw_response.update(
id="my-ai-search",
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
instance = await response.parse()
assert_matches_type(InstanceUpdateResponse, instance, path=["response"])
@parametrize
async def test_streaming_response_update(self, async_client: AsyncCloudflare) -> None:
async with async_client.aisearch.instances.with_streaming_response.update(
id="my-ai-search",
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
instance = await response.parse()
assert_matches_type(InstanceUpdateResponse, instance, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
async def test_path_params_update(self, async_client: AsyncCloudflare) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
await async_client.aisearch.instances.with_raw_response.update(
id="my-ai-search",
account_id="",
)
with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
await async_client.aisearch.instances.with_raw_response.update(
id="",
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
)
@parametrize
async def test_method_list(self, async_client: AsyncCloudflare) -> None:
instance = await async_client.aisearch.instances.list(
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
)
assert_matches_type(AsyncV4PagePaginationArray[InstanceListResponse], instance, path=["response"])
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncCloudflare) -> None:
instance = await async_client.aisearch.instances.list(
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
page=1,
per_page=1,
search="search",
)
assert_matches_type(AsyncV4PagePaginationArray[InstanceListResponse], instance, path=["response"])
@parametrize
async def test_raw_response_list(self, async_client: AsyncCloudflare) -> None:
response = await async_client.aisearch.instances.with_raw_response.list(
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
instance = await response.parse()
assert_matches_type(AsyncV4PagePaginationArray[InstanceListResponse], instance, path=["response"])
@parametrize
async def test_streaming_response_list(self, async_client: AsyncCloudflare) -> None:
async with async_client.aisearch.instances.with_streaming_response.list(
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
instance = await response.parse()
assert_matches_type(AsyncV4PagePaginationArray[InstanceListResponse], instance, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
async def test_path_params_list(self, async_client: AsyncCloudflare) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
await async_client.aisearch.instances.with_raw_response.list(
account_id="",
)
@parametrize
async def test_method_delete(self, async_client: AsyncCloudflare) -> None:
instance = await async_client.aisearch.instances.delete(
id="my-ai-search",
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
)
assert_matches_type(InstanceDeleteResponse, instance, path=["response"])
@parametrize
async def test_raw_response_delete(self, async_client: AsyncCloudflare) -> None:
response = await async_client.aisearch.instances.with_raw_response.delete(
id="my-ai-search",
account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
instance = await response.parse()
assert_matches_type(InstanceDeleteResponse, instance, path=["response"])
    @parametrize
    async def test_streaming_response_delete(self, async_client: AsyncCloudflare) -> None:
        """Delete an instance via the streaming-response context manager."""
        async with async_client.aisearch.instances.with_streaming_response.delete(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            instance = await response.parse()
            assert_matches_type(InstanceDeleteResponse, instance, path=["response"])

        assert cast(Any, response.is_closed) is True
    @parametrize
    async def test_path_params_delete(self, async_client: AsyncCloudflare) -> None:
        """Empty `account_id` or `id` path params must raise ValueError client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.instances.with_raw_response.delete(
                id="my-ai-search",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.instances.with_raw_response.delete(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )
    @parametrize
    async def test_method_read(self, async_client: AsyncCloudflare) -> None:
        """Read a single AI Search instance and validate the response type."""
        instance = await async_client.aisearch.instances.read(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(InstanceReadResponse, instance, path=["response"])
    @parametrize
    async def test_raw_response_read(self, async_client: AsyncCloudflare) -> None:
        """Read an instance via the raw-response wrapper and parse the body."""
        response = await async_client.aisearch.instances.with_raw_response.read(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        instance = await response.parse()
        assert_matches_type(InstanceReadResponse, instance, path=["response"])
    @parametrize
    async def test_streaming_response_read(self, async_client: AsyncCloudflare) -> None:
        """Read an instance via the streaming-response context manager."""
        async with async_client.aisearch.instances.with_streaming_response.read(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            instance = await response.parse()
            assert_matches_type(InstanceReadResponse, instance, path=["response"])

        assert cast(Any, response.is_closed) is True
    @parametrize
    async def test_path_params_read(self, async_client: AsyncCloudflare) -> None:
        """Empty `account_id` or `id` path params must raise ValueError client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.instances.with_raw_response.read(
                id="my-ai-search",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.instances.with_raw_response.read(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )
    @parametrize
    async def test_method_stats(self, async_client: AsyncCloudflare) -> None:
        """Fetch stats for an AI Search instance and validate the response type."""
        instance = await async_client.aisearch.instances.stats(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(InstanceStatsResponse, instance, path=["response"])
    @parametrize
    async def test_raw_response_stats(self, async_client: AsyncCloudflare) -> None:
        """Fetch instance stats via the raw-response wrapper and parse the body."""
        response = await async_client.aisearch.instances.with_raw_response.stats(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        instance = await response.parse()
        assert_matches_type(InstanceStatsResponse, instance, path=["response"])
    @parametrize
    async def test_streaming_response_stats(self, async_client: AsyncCloudflare) -> None:
        """Fetch instance stats via the streaming-response context manager."""
        async with async_client.aisearch.instances.with_streaming_response.stats(
            id="my-ai-search",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            instance = await response.parse()
            assert_matches_type(InstanceStatsResponse, instance, path=["response"])

        assert cast(Any, response.is_closed) is True
    @parametrize
    async def test_path_params_stats(self, async_client: AsyncCloudflare) -> None:
        """Empty `account_id` or `id` path params must raise ValueError client-side."""
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.instances.with_raw_response.stats(
                id="my-ai-search",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.instances.with_raw_response.stats(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )

View file

@ -0,0 +1,535 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
import os
from typing import Any, cast
import pytest
from cloudflare import Cloudflare, AsyncCloudflare
from tests.utils import assert_matches_type
from cloudflare.pagination import SyncV4PagePaginationArray, AsyncV4PagePaginationArray
from cloudflare.types.aisearch import (
TokenListResponse,
TokenReadResponse,
TokenCreateResponse,
TokenDeleteResponse,
TokenUpdateResponse,
)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
class TestTokens:
    """Tests for the synchronous `aisearch.tokens` resource.

    Each endpoint (create / update / list / delete / read) is exercised in up
    to four ways: the plain method call, the raw-response wrapper
    (`with_raw_response`), the streaming-response context manager
    (`with_streaming_response`), and path-parameter validation (empty
    `account_id` / `id` must raise ValueError before any request is sent).
    `parametrize` runs every test against a loose- and a strict-validation
    client.
    """

    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    # --- create -----------------------------------------------------------

    @parametrize
    def test_method_create(self, client: Cloudflare) -> None:
        token = client.aisearch.tokens.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            cf_api_id="cf_api_id",
            cf_api_key="cf_api_key",
            name="name",
        )
        assert_matches_type(TokenCreateResponse, token, path=["response"])

    @parametrize
    def test_method_create_with_all_params(self, client: Cloudflare) -> None:
        # Same as above plus every optional body parameter (`legacy`).
        token = client.aisearch.tokens.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            cf_api_id="cf_api_id",
            cf_api_key="cf_api_key",
            name="name",
            legacy=True,
        )
        assert_matches_type(TokenCreateResponse, token, path=["response"])

    @parametrize
    def test_raw_response_create(self, client: Cloudflare) -> None:
        response = client.aisearch.tokens.with_raw_response.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            cf_api_id="cf_api_id",
            cf_api_key="cf_api_key",
            name="name",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        token = response.parse()
        assert_matches_type(TokenCreateResponse, token, path=["response"])

    @parametrize
    def test_streaming_response_create(self, client: Cloudflare) -> None:
        with client.aisearch.tokens.with_streaming_response.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            cf_api_id="cf_api_id",
            cf_api_key="cf_api_key",
            name="name",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            token = response.parse()
            assert_matches_type(TokenCreateResponse, token, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_create(self, client: Cloudflare) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.tokens.with_raw_response.create(
                account_id="",
                cf_api_id="cf_api_id",
                cf_api_key="cf_api_key",
                name="name",
            )

    # --- update -----------------------------------------------------------

    @parametrize
    def test_method_update(self, client: Cloudflare) -> None:
        token = client.aisearch.tokens.update(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(TokenUpdateResponse, token, path=["response"])

    @parametrize
    def test_raw_response_update(self, client: Cloudflare) -> None:
        response = client.aisearch.tokens.with_raw_response.update(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        token = response.parse()
        assert_matches_type(TokenUpdateResponse, token, path=["response"])

    @parametrize
    def test_streaming_response_update(self, client: Cloudflare) -> None:
        with client.aisearch.tokens.with_streaming_response.update(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            token = response.parse()
            assert_matches_type(TokenUpdateResponse, token, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_update(self, client: Cloudflare) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.tokens.with_raw_response.update(
                id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.tokens.with_raw_response.update(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )

    # --- list -------------------------------------------------------------

    @parametrize
    def test_method_list(self, client: Cloudflare) -> None:
        token = client.aisearch.tokens.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(SyncV4PagePaginationArray[TokenListResponse], token, path=["response"])

    @parametrize
    def test_method_list_with_all_params(self, client: Cloudflare) -> None:
        # Same as above plus pagination query parameters.
        token = client.aisearch.tokens.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            page=1,
            per_page=1,
        )
        assert_matches_type(SyncV4PagePaginationArray[TokenListResponse], token, path=["response"])

    @parametrize
    def test_raw_response_list(self, client: Cloudflare) -> None:
        response = client.aisearch.tokens.with_raw_response.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        token = response.parse()
        assert_matches_type(SyncV4PagePaginationArray[TokenListResponse], token, path=["response"])

    @parametrize
    def test_streaming_response_list(self, client: Cloudflare) -> None:
        with client.aisearch.tokens.with_streaming_response.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            token = response.parse()
            assert_matches_type(SyncV4PagePaginationArray[TokenListResponse], token, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_list(self, client: Cloudflare) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.tokens.with_raw_response.list(
                account_id="",
            )

    # --- delete -----------------------------------------------------------

    @parametrize
    def test_method_delete(self, client: Cloudflare) -> None:
        token = client.aisearch.tokens.delete(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(TokenDeleteResponse, token, path=["response"])

    @parametrize
    def test_raw_response_delete(self, client: Cloudflare) -> None:
        response = client.aisearch.tokens.with_raw_response.delete(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        token = response.parse()
        assert_matches_type(TokenDeleteResponse, token, path=["response"])

    @parametrize
    def test_streaming_response_delete(self, client: Cloudflare) -> None:
        with client.aisearch.tokens.with_streaming_response.delete(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            token = response.parse()
            assert_matches_type(TokenDeleteResponse, token, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_delete(self, client: Cloudflare) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.tokens.with_raw_response.delete(
                id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.tokens.with_raw_response.delete(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )

    # --- read -------------------------------------------------------------

    @parametrize
    def test_method_read(self, client: Cloudflare) -> None:
        token = client.aisearch.tokens.read(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(TokenReadResponse, token, path=["response"])

    @parametrize
    def test_raw_response_read(self, client: Cloudflare) -> None:
        response = client.aisearch.tokens.with_raw_response.read(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        token = response.parse()
        assert_matches_type(TokenReadResponse, token, path=["response"])

    @parametrize
    def test_streaming_response_read(self, client: Cloudflare) -> None:
        with client.aisearch.tokens.with_streaming_response.read(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            token = response.parse()
            assert_matches_type(TokenReadResponse, token, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_read(self, client: Cloudflare) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            client.aisearch.tokens.with_raw_response.read(
                id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            client.aisearch.tokens.with_raw_response.read(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )
class TestAsyncTokens:
    """Async mirror of `TestTokens` for the `aisearch.tokens` resource.

    Identical coverage (plain call, raw-response, streaming-response, and
    path-parameter validation per endpoint), driven through
    `AsyncCloudflare`.  `parametrize` additionally runs each test against
    an aiohttp-backed client.
    """

    parametrize = pytest.mark.parametrize(
        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
    )

    # --- create -----------------------------------------------------------

    @parametrize
    async def test_method_create(self, async_client: AsyncCloudflare) -> None:
        token = await async_client.aisearch.tokens.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            cf_api_id="cf_api_id",
            cf_api_key="cf_api_key",
            name="name",
        )
        assert_matches_type(TokenCreateResponse, token, path=["response"])

    @parametrize
    async def test_method_create_with_all_params(self, async_client: AsyncCloudflare) -> None:
        # Same as above plus every optional body parameter (`legacy`).
        token = await async_client.aisearch.tokens.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            cf_api_id="cf_api_id",
            cf_api_key="cf_api_key",
            name="name",
            legacy=True,
        )
        assert_matches_type(TokenCreateResponse, token, path=["response"])

    @parametrize
    async def test_raw_response_create(self, async_client: AsyncCloudflare) -> None:
        response = await async_client.aisearch.tokens.with_raw_response.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            cf_api_id="cf_api_id",
            cf_api_key="cf_api_key",
            name="name",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        token = await response.parse()
        assert_matches_type(TokenCreateResponse, token, path=["response"])

    @parametrize
    async def test_streaming_response_create(self, async_client: AsyncCloudflare) -> None:
        async with async_client.aisearch.tokens.with_streaming_response.create(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            cf_api_id="cf_api_id",
            cf_api_key="cf_api_key",
            name="name",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            token = await response.parse()
            assert_matches_type(TokenCreateResponse, token, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_path_params_create(self, async_client: AsyncCloudflare) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.tokens.with_raw_response.create(
                account_id="",
                cf_api_id="cf_api_id",
                cf_api_key="cf_api_key",
                name="name",
            )

    # --- update -----------------------------------------------------------

    @parametrize
    async def test_method_update(self, async_client: AsyncCloudflare) -> None:
        token = await async_client.aisearch.tokens.update(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(TokenUpdateResponse, token, path=["response"])

    @parametrize
    async def test_raw_response_update(self, async_client: AsyncCloudflare) -> None:
        response = await async_client.aisearch.tokens.with_raw_response.update(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        token = await response.parse()
        assert_matches_type(TokenUpdateResponse, token, path=["response"])

    @parametrize
    async def test_streaming_response_update(self, async_client: AsyncCloudflare) -> None:
        async with async_client.aisearch.tokens.with_streaming_response.update(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            token = await response.parse()
            assert_matches_type(TokenUpdateResponse, token, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_path_params_update(self, async_client: AsyncCloudflare) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.tokens.with_raw_response.update(
                id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.tokens.with_raw_response.update(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )

    # --- list -------------------------------------------------------------

    @parametrize
    async def test_method_list(self, async_client: AsyncCloudflare) -> None:
        token = await async_client.aisearch.tokens.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(AsyncV4PagePaginationArray[TokenListResponse], token, path=["response"])

    @parametrize
    async def test_method_list_with_all_params(self, async_client: AsyncCloudflare) -> None:
        # Same as above plus pagination query parameters.
        token = await async_client.aisearch.tokens.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            page=1,
            per_page=1,
        )
        assert_matches_type(AsyncV4PagePaginationArray[TokenListResponse], token, path=["response"])

    @parametrize
    async def test_raw_response_list(self, async_client: AsyncCloudflare) -> None:
        response = await async_client.aisearch.tokens.with_raw_response.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        token = await response.parse()
        assert_matches_type(AsyncV4PagePaginationArray[TokenListResponse], token, path=["response"])

    @parametrize
    async def test_streaming_response_list(self, async_client: AsyncCloudflare) -> None:
        async with async_client.aisearch.tokens.with_streaming_response.list(
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            token = await response.parse()
            assert_matches_type(AsyncV4PagePaginationArray[TokenListResponse], token, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_path_params_list(self, async_client: AsyncCloudflare) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.tokens.with_raw_response.list(
                account_id="",
            )

    # --- delete -----------------------------------------------------------

    @parametrize
    async def test_method_delete(self, async_client: AsyncCloudflare) -> None:
        token = await async_client.aisearch.tokens.delete(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(TokenDeleteResponse, token, path=["response"])

    @parametrize
    async def test_raw_response_delete(self, async_client: AsyncCloudflare) -> None:
        response = await async_client.aisearch.tokens.with_raw_response.delete(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        token = await response.parse()
        assert_matches_type(TokenDeleteResponse, token, path=["response"])

    @parametrize
    async def test_streaming_response_delete(self, async_client: AsyncCloudflare) -> None:
        async with async_client.aisearch.tokens.with_streaming_response.delete(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            token = await response.parse()
            assert_matches_type(TokenDeleteResponse, token, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_path_params_delete(self, async_client: AsyncCloudflare) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.tokens.with_raw_response.delete(
                id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.tokens.with_raw_response.delete(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )

    # --- read -------------------------------------------------------------

    @parametrize
    async def test_method_read(self, async_client: AsyncCloudflare) -> None:
        token = await async_client.aisearch.tokens.read(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )
        assert_matches_type(TokenReadResponse, token, path=["response"])

    @parametrize
    async def test_raw_response_read(self, async_client: AsyncCloudflare) -> None:
        response = await async_client.aisearch.tokens.with_raw_response.read(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        token = await response.parse()
        assert_matches_type(TokenReadResponse, token, path=["response"])

    @parametrize
    async def test_streaming_response_read(self, async_client: AsyncCloudflare) -> None:
        async with async_client.aisearch.tokens.with_streaming_response.read(
            id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            token = await response.parse()
            assert_matches_type(TokenReadResponse, token, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_path_params_read(self, async_client: AsyncCloudflare) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"):
            await async_client.aisearch.tokens.with_raw_response.read(
                id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
                account_id="",
            )

        with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
            await async_client.aisearch.tokens.with_raw_response.read(
                id="",
                account_id="c3dc5f0b34a14ff8e1b3ec04895e1b22",
            )