Skip to content

Commit f9d2d13

Browse files
fix(api): correct prompt_cache_retention enum value from in-memory to in_memory (#1822)
Co-authored-by: stainless-app[bot] <142633134+stainless-app[bot]@users.noreply.github.com>
1 parent e507a4e commit f9d2d13

9 files changed

Lines changed: 41 additions & 41 deletions

File tree

src/openai/resources/chat/completions/completions.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -109,7 +109,7 @@ def parse(
109109
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
110110
presence_penalty: Optional[float] | Omit = omit,
111111
prompt_cache_key: str | Omit = omit,
112-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
112+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
113113
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
114114
safety_identifier: str | Omit = omit,
115115
seed: Optional[int] | Omit = omit,
@@ -264,7 +264,7 @@ def create(
264264
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
265265
presence_penalty: Optional[float] | Omit = omit,
266266
prompt_cache_key: str | Omit = omit,
267-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
267+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
268268
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
269269
response_format: completion_create_params.ResponseFormat | Omit = omit,
270270
safety_identifier: str | Omit = omit,
@@ -571,7 +571,7 @@ def create(
571571
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
572572
presence_penalty: Optional[float] | Omit = omit,
573573
prompt_cache_key: str | Omit = omit,
574-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
574+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
575575
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
576576
response_format: completion_create_params.ResponseFormat | Omit = omit,
577577
safety_identifier: str | Omit = omit,
@@ -877,7 +877,7 @@ def create(
877877
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
878878
presence_penalty: Optional[float] | Omit = omit,
879879
prompt_cache_key: str | Omit = omit,
880-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
880+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
881881
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
882882
response_format: completion_create_params.ResponseFormat | Omit = omit,
883883
safety_identifier: str | Omit = omit,
@@ -1182,7 +1182,7 @@ def create(
11821182
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
11831183
presence_penalty: Optional[float] | Omit = omit,
11841184
prompt_cache_key: str | Omit = omit,
1185-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1185+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
11861186
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
11871187
response_format: completion_create_params.ResponseFormat | Omit = omit,
11881188
safety_identifier: str | Omit = omit,
@@ -1461,7 +1461,7 @@ def stream(
14611461
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
14621462
presence_penalty: Optional[float] | Omit = omit,
14631463
prompt_cache_key: str | Omit = omit,
1464-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1464+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
14651465
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
14661466
safety_identifier: str | Omit = omit,
14671467
seed: Optional[int] | Omit = omit,
@@ -1612,7 +1612,7 @@ async def parse(
16121612
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
16131613
presence_penalty: Optional[float] | Omit = omit,
16141614
prompt_cache_key: str | Omit = omit,
1615-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1615+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
16161616
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
16171617
safety_identifier: str | Omit = omit,
16181618
seed: Optional[int] | Omit = omit,
@@ -1767,7 +1767,7 @@ async def create(
17671767
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
17681768
presence_penalty: Optional[float] | Omit = omit,
17691769
prompt_cache_key: str | Omit = omit,
1770-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1770+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
17711771
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
17721772
response_format: completion_create_params.ResponseFormat | Omit = omit,
17731773
safety_identifier: str | Omit = omit,
@@ -2074,7 +2074,7 @@ async def create(
20742074
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
20752075
presence_penalty: Optional[float] | Omit = omit,
20762076
prompt_cache_key: str | Omit = omit,
2077-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2077+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
20782078
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
20792079
response_format: completion_create_params.ResponseFormat | Omit = omit,
20802080
safety_identifier: str | Omit = omit,
@@ -2380,7 +2380,7 @@ async def create(
23802380
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
23812381
presence_penalty: Optional[float] | Omit = omit,
23822382
prompt_cache_key: str | Omit = omit,
2383-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2383+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
23842384
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
23852385
response_format: completion_create_params.ResponseFormat | Omit = omit,
23862386
safety_identifier: str | Omit = omit,
@@ -2685,7 +2685,7 @@ async def create(
26852685
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
26862686
presence_penalty: Optional[float] | Omit = omit,
26872687
prompt_cache_key: str | Omit = omit,
2688-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2688+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
26892689
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
26902690
response_format: completion_create_params.ResponseFormat | Omit = omit,
26912691
safety_identifier: str | Omit = omit,
@@ -2964,7 +2964,7 @@ def stream(
29642964
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
29652965
presence_penalty: Optional[float] | Omit = omit,
29662966
prompt_cache_key: str | Omit = omit,
2967-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2967+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
29682968
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
29692969
safety_identifier: str | Omit = omit,
29702970
seed: Optional[int] | Omit = omit,

src/openai/resources/responses/responses.py

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -146,7 +146,7 @@ def create(
146146
previous_response_id: Optional[str] | Omit = omit,
147147
prompt: Optional[ResponsePromptParam] | Omit = omit,
148148
prompt_cache_key: str | Omit = omit,
149-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
149+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
150150
reasoning: Optional[Reasoning] | Omit = omit,
151151
safety_identifier: str | Omit = omit,
152152
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -396,7 +396,7 @@ def create(
396396
previous_response_id: Optional[str] | Omit = omit,
397397
prompt: Optional[ResponsePromptParam] | Omit = omit,
398398
prompt_cache_key: str | Omit = omit,
399-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
399+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
400400
reasoning: Optional[Reasoning] | Omit = omit,
401401
safety_identifier: str | Omit = omit,
402402
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -645,7 +645,7 @@ def create(
645645
previous_response_id: Optional[str] | Omit = omit,
646646
prompt: Optional[ResponsePromptParam] | Omit = omit,
647647
prompt_cache_key: str | Omit = omit,
648-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
648+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
649649
reasoning: Optional[Reasoning] | Omit = omit,
650650
safety_identifier: str | Omit = omit,
651651
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -892,7 +892,7 @@ def create(
892892
previous_response_id: Optional[str] | Omit = omit,
893893
prompt: Optional[ResponsePromptParam] | Omit = omit,
894894
prompt_cache_key: str | Omit = omit,
895-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
895+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
896896
reasoning: Optional[Reasoning] | Omit = omit,
897897
safety_identifier: str | Omit = omit,
898898
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -995,7 +995,7 @@ def stream(
995995
previous_response_id: Optional[str] | Omit = omit,
996996
prompt: Optional[ResponsePromptParam] | Omit = omit,
997997
prompt_cache_key: str | Omit = omit,
998-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
998+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
999999
reasoning: Optional[Reasoning] | Omit = omit,
10001000
safety_identifier: str | Omit = omit,
10011001
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -1036,7 +1036,7 @@ def stream(
10361036
previous_response_id: Optional[str] | Omit = omit,
10371037
prompt: Optional[ResponsePromptParam] | Omit = omit,
10381038
prompt_cache_key: str | Omit = omit,
1039-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1039+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
10401040
reasoning: Optional[Reasoning] | Omit = omit,
10411041
safety_identifier: str | Omit = omit,
10421042
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -1187,7 +1187,7 @@ def parse(
11871187
previous_response_id: Optional[str] | Omit = omit,
11881188
prompt: Optional[ResponsePromptParam] | Omit = omit,
11891189
prompt_cache_key: str | Omit = omit,
1190-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1190+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
11911191
reasoning: Optional[Reasoning] | Omit = omit,
11921192
safety_identifier: str | Omit = omit,
11931193
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -1823,7 +1823,7 @@ async def create(
18231823
previous_response_id: Optional[str] | Omit = omit,
18241824
prompt: Optional[ResponsePromptParam] | Omit = omit,
18251825
prompt_cache_key: str | Omit = omit,
1826-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1826+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
18271827
reasoning: Optional[Reasoning] | Omit = omit,
18281828
safety_identifier: str | Omit = omit,
18291829
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -2073,7 +2073,7 @@ async def create(
20732073
previous_response_id: Optional[str] | Omit = omit,
20742074
prompt: Optional[ResponsePromptParam] | Omit = omit,
20752075
prompt_cache_key: str | Omit = omit,
2076-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2076+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
20772077
reasoning: Optional[Reasoning] | Omit = omit,
20782078
safety_identifier: str | Omit = omit,
20792079
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -2322,7 +2322,7 @@ async def create(
23222322
previous_response_id: Optional[str] | Omit = omit,
23232323
prompt: Optional[ResponsePromptParam] | Omit = omit,
23242324
prompt_cache_key: str | Omit = omit,
2325-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2325+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
23262326
reasoning: Optional[Reasoning] | Omit = omit,
23272327
safety_identifier: str | Omit = omit,
23282328
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -2569,7 +2569,7 @@ async def create(
25692569
previous_response_id: Optional[str] | Omit = omit,
25702570
prompt: Optional[ResponsePromptParam] | Omit = omit,
25712571
prompt_cache_key: str | Omit = omit,
2572-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2572+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
25732573
reasoning: Optional[Reasoning] | Omit = omit,
25742574
safety_identifier: str | Omit = omit,
25752575
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -2672,7 +2672,7 @@ def stream(
26722672
previous_response_id: Optional[str] | Omit = omit,
26732673
prompt: Optional[ResponsePromptParam] | Omit = omit,
26742674
prompt_cache_key: str | Omit = omit,
2675-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2675+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
26762676
reasoning: Optional[Reasoning] | Omit = omit,
26772677
safety_identifier: str | Omit = omit,
26782678
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -2713,7 +2713,7 @@ def stream(
27132713
previous_response_id: Optional[str] | Omit = omit,
27142714
prompt: Optional[ResponsePromptParam] | Omit = omit,
27152715
prompt_cache_key: str | Omit = omit,
2716-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2716+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
27172717
reasoning: Optional[Reasoning] | Omit = omit,
27182718
safety_identifier: str | Omit = omit,
27192719
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -2868,7 +2868,7 @@ async def parse(
28682868
previous_response_id: Optional[str] | Omit = omit,
28692869
prompt: Optional[ResponsePromptParam] | Omit = omit,
28702870
prompt_cache_key: str | Omit = omit,
2871-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2871+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
28722872
reasoning: Optional[Reasoning] | Omit = omit,
28732873
safety_identifier: str | Omit = omit,
28742874
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -4543,7 +4543,7 @@ def create(
45434543
previous_response_id: Optional[str] | Omit = omit,
45444544
prompt: Optional[ResponsePromptParam] | Omit = omit,
45454545
prompt_cache_key: str | Omit = omit,
4546-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
4546+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
45474547
reasoning: Optional[Reasoning] | Omit = omit,
45484548
safety_identifier: str | Omit = omit,
45494549
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,
@@ -4623,7 +4623,7 @@ async def create(
46234623
previous_response_id: Optional[str] | Omit = omit,
46244624
prompt: Optional[ResponsePromptParam] | Omit = omit,
46254625
prompt_cache_key: str | Omit = omit,
4626-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
4626+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
46274627
reasoning: Optional[Reasoning] | Omit = omit,
46284628
safety_identifier: str | Omit = omit,
46294629
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | Omit = omit,

src/openai/types/chat/completion_create_params.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -185,7 +185,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
185185
[Learn more](https://platform.openai.com/docs/guides/prompt-caching).
186186
"""
187187

188-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]]
188+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]]
189189
"""The retention policy for the prompt cache.
190190
191191
Set to `24h` to enable extended prompt caching, which keeps cached prefixes

src/openai/types/responses/response.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -214,7 +214,7 @@ class Response(BaseModel):
214214
[Learn more](https://platform.openai.com/docs/guides/prompt-caching).
215215
"""
216216

217-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] = None
217+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] = None
218218
"""The retention policy for the prompt cache.
219219
220220
Set to `24h` to enable extended prompt caching, which keeps cached prefixes

src/openai/types/responses/response_create_params.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -152,7 +152,7 @@ class ResponseCreateParamsBase(TypedDict, total=False):
152152
[Learn more](https://platform.openai.com/docs/guides/prompt-caching).
153153
"""
154154

155-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]]
155+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]]
156156
"""The retention policy for the prompt cache.
157157
158158
Set to `24h` to enable extended prompt caching, which keeps cached prefixes

src/openai/types/responses/responses_client_event.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -184,7 +184,7 @@ class ResponsesClientEvent(BaseModel):
184184
[Learn more](https://platform.openai.com/docs/guides/prompt-caching).
185185
"""
186186

187-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] = None
187+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] = None
188188
"""The retention policy for the prompt cache.
189189
190190
Set to `24h` to enable extended prompt caching, which keeps cached prefixes

src/openai/types/responses/responses_client_event_param.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -185,7 +185,7 @@ class ResponsesClientEventParam(TypedDict, total=False):
185185
[Learn more](https://platform.openai.com/docs/guides/prompt-caching).
186186
"""
187187

188-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]]
188+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]]
189189
"""The retention policy for the prompt cache.
190190
191191
Set to `24h` to enable extended prompt caching, which keeps cached prefixes

tests/api_resources/chat/test_completions.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ def test_method_create_with_all_params_overload_1(self, client: OpenAI) -> None:
7373
},
7474
presence_penalty=-2,
7575
prompt_cache_key="prompt-cache-key-1234",
76-
prompt_cache_retention="in-memory",
76+
prompt_cache_retention="in_memory",
7777
reasoning_effort="none",
7878
response_format={"type": "text"},
7979
safety_identifier="safety-identifier-1234",
@@ -207,7 +207,7 @@ def test_method_create_with_all_params_overload_2(self, client: OpenAI) -> None:
207207
},
208208
presence_penalty=-2,
209209
prompt_cache_key="prompt-cache-key-1234",
210-
prompt_cache_retention="in-memory",
210+
prompt_cache_retention="in_memory",
211211
reasoning_effort="none",
212212
response_format={"type": "text"},
213213
safety_identifier="safety-identifier-1234",
@@ -516,7 +516,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
516516
},
517517
presence_penalty=-2,
518518
prompt_cache_key="prompt-cache-key-1234",
519-
prompt_cache_retention="in-memory",
519+
prompt_cache_retention="in_memory",
520520
reasoning_effort="none",
521521
response_format={"type": "text"},
522522
safety_identifier="safety-identifier-1234",
@@ -650,7 +650,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
650650
},
651651
presence_penalty=-2,
652652
prompt_cache_key="prompt-cache-key-1234",
653-
prompt_cache_retention="in-memory",
653+
prompt_cache_retention="in_memory",
654654
reasoning_effort="none",
655655
response_format={"type": "text"},
656656
safety_identifier="safety-identifier-1234",

0 commit comments

Comments (0)