diff --git a/.stats.yml b/.stats.yml
index eba4bed69..112197c7a 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 1706
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cloudflare%2Fcloudflare-b195f55634f5c9a77fe4b6028531bb315e7eb6a946ed2f2ef4a9f03367a9e688.yml
-openapi_spec_hash: 5dc0b6788e545012436a0102ad4c66c4
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cloudflare%2Fcloudflare-1a97957875bffc94f15a56f3a2c04c8c9587bef638bd54190384c581bf2ab8db.yml
+openapi_spec_hash: 9473d561903fb7947989456228093312
 config_hash: 67f412c990647f3cb598378fa22a9db5
diff --git a/tests/api_resources/test_ai.py b/tests/api_resources/test_ai.py
index 26e73537d..3e168ba5d 100644
--- a/tests/api_resources/test_ai.py
+++ b/tests/api_resources/test_ai.py
@@ -447,10 +447,10 @@ class TestAI:
             model_name="model_name",
             account_id="023e105f4ecef8ad9ca31a8372d0c353",
             prompt="x",
-            frequency_penalty=0,
+            frequency_penalty=-2,
             lora="lora",
             max_tokens=0,
-            presence_penalty=0,
+            presence_penalty=-2,
             raw=True,
             repetition_penalty=0,
             response_format={
@@ -461,7 +461,7 @@
             stream=True,
             temperature=0,
             top_k=1,
-            top_p=0,
+            top_p=0.001,
         )
         assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
 
@@ -534,7 +534,7 @@ class TestAI:
                     "role": "role",
                 }
             ],
-            frequency_penalty=0,
+            frequency_penalty=-2,
             functions=[
                 {
                     "code": "code",
@@ -542,7 +542,7 @@
                 }
             ],
             max_tokens=0,
-            presence_penalty=0,
+            presence_penalty=-2,
             raw=True,
             repetition_penalty=0,
             response_format={
@@ -569,7 +569,7 @@
                 }
             ],
             top_k=1,
-            top_p=0,
+            top_p=0.001,
         )
         assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
 
@@ -1459,10 +1459,10 @@ class TestAsyncAI:
             model_name="model_name",
             account_id="023e105f4ecef8ad9ca31a8372d0c353",
             prompt="x",
-            frequency_penalty=0,
+            frequency_penalty=-2,
             lora="lora",
             max_tokens=0,
-            presence_penalty=0,
+            presence_penalty=-2,
             raw=True,
             repetition_penalty=0,
             response_format={
@@ -1473,7 +1473,7 @@
             stream=True,
             temperature=0,
             top_k=1,
-            top_p=0,
+            top_p=0.001,
         )
         assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
 
@@ -1546,7 +1546,7 @@ class TestAsyncAI:
                     "role": "role",
                 }
             ],
-            frequency_penalty=0,
+            frequency_penalty=-2,
             functions=[
                 {
                     "code": "code",
@@ -1554,7 +1554,7 @@
                 }
            ],
             max_tokens=0,
-            presence_penalty=0,
+            presence_penalty=-2,
             raw=True,
             repetition_penalty=0,
             response_format={
@@ -1581,7 +1581,7 @@
                 }
             ],
             top_k=1,
-            top_p=0,
+            top_p=0.001,
         )
         assert_matches_type(Optional[AIRunResponse], ai, path=["response"])
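
The test changes track the regenerated OpenAPI spec: the all-params tests now send frequency_penalty=-2 and presence_penalty=-2 instead of 0, and top_p=0.001 instead of 0, i.e. they exercise the smallest values the updated schema accepts. Below is a minimal sketch of a call that passes the same boundary values through the SDK's client.ai.run() method covered by these tests; the model name is a placeholder, and the reading of -2 / 0.001 as lower bounds is inferred from the test values rather than stated anywhere in this diff.

# Minimal sketch (not part of this PR): send the boundary values the updated tests use.
from cloudflare import Cloudflare

client = Cloudflare()  # API token is read from the environment

ai = client.ai.run(
    model_name="@cf/meta/llama-2-7b-chat-int8",  # hypothetical model name
    account_id="023e105f4ecef8ad9ca31a8372d0c353",
    prompt="x",
    frequency_penalty=-2,  # lowest value the new tests exercise
    presence_penalty=-2,   # lowest value the new tests exercise
    top_p=0.001,           # tests no longer send 0 here
)
print(ai)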