Skip to content

Commit 94c88b8

Browse files
apcha-oai, cameron-mcateer, and stainless-app[bot]
authored
release: 2.33.0 (#3119)
* fix(api): correct prompt_cache_retention enum value from in-memory to in_memory (#1822) Co-authored-by: stainless-app[bot] <142633134+stainless-app[bot]@users.noreply.github.com> * chore(ci): remove release-doctor workflow * feat(api): api update * codegen metadata * release: 2.33.0 --------- Co-authored-by: Cameron McAteer <246350779+cameron-mcateer@users.noreply.github.com> Co-authored-by: stainless-app[bot] <142633134+stainless-app[bot]@users.noreply.github.com>
2 parents e507a4e + c5b099c commit 94c88b8

17 files changed

Lines changed: 78 additions & 95 deletions

.github/workflows/release-doctor.yml

Lines changed: 0 additions & 23 deletions
This file was deleted.

.release-please-manifest.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
{
2-
".": "2.32.0"
2+
".": "2.33.0"
33
}

.stats.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
configured_endpoints: 152
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-7c540cce6eb30401259f4831ea9803b6d88501605d13734f98212cbb3b199e10.yml
3-
openapi_spec_hash: 06e656be22bbb92689954253668b42fc
4-
config_hash: 1a88b104658b6c854117996c080ebe6b
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-64c6ba619ccbf87e56b4f464230d04401fd78ad924d2606176309d19ca281af5.yml
3+
openapi_spec_hash: 5e4f2073040a12c26ce58e86a72fe47e
4+
config_hash: 50c98d8869a8cfdee2ab7dc664c4b6fe

CHANGELOG.md

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,23 @@
11
# Changelog
22

3+
## 2.33.0 (2026-04-28)
4+
5+
Full Changelog: [v2.32.0...v2.33.0](https://github.com/openai/openai-python/compare/v2.32.0...v2.33.0)
6+
7+
### Features
8+
9+
* **api:** api update ([18f834a](https://github.com/openai/openai-python/commit/18f834a54f92ea827452471a46a4f442f251e2c8))
10+
11+
12+
### Bug Fixes
13+
14+
* **api:** correct prompt_cache_retention enum value from in-memory to in_memory ([#1822](https://github.com/openai/openai-python/issues/1822)) ([f9d2d13](https://github.com/openai/openai-python/commit/f9d2d1359688a6247ecba858fc687173c480c9c8))
15+
16+
17+
### Chores
18+
19+
* **ci:** remove release-doctor workflow ([00b2091](https://github.com/openai/openai-python/commit/00b20910e3539842f21d86ab5928fb5216d3a765))
20+
321
## 2.32.0 (2026-04-15)
422

523
Full Changelog: [v2.31.0...v2.32.0](https://github.com/openai/openai-python/compare/v2.31.0...v2.32.0)

bin/check-release-environment

Lines changed: 0 additions & 25 deletions
This file was deleted.

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "openai"
3-
version = "2.32.0"
3+
version = "2.33.0"
44
description = "The official Python library for the openai API"
55
dynamic = ["readme"]
66
license = "Apache-2.0"

src/openai/_version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
22

33
__title__ = "openai"
4-
__version__ = "2.32.0" # x-release-please-version
4+
__version__ = "2.33.0" # x-release-please-version

src/openai/resources/chat/completions/completions.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -109,7 +109,7 @@ def parse(
109109
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
110110
presence_penalty: Optional[float] | Omit = omit,
111111
prompt_cache_key: str | Omit = omit,
112-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
112+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
113113
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
114114
safety_identifier: str | Omit = omit,
115115
seed: Optional[int] | Omit = omit,
@@ -264,7 +264,7 @@ def create(
264264
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
265265
presence_penalty: Optional[float] | Omit = omit,
266266
prompt_cache_key: str | Omit = omit,
267-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
267+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
268268
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
269269
response_format: completion_create_params.ResponseFormat | Omit = omit,
270270
safety_identifier: str | Omit = omit,
@@ -571,7 +571,7 @@ def create(
571571
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
572572
presence_penalty: Optional[float] | Omit = omit,
573573
prompt_cache_key: str | Omit = omit,
574-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
574+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
575575
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
576576
response_format: completion_create_params.ResponseFormat | Omit = omit,
577577
safety_identifier: str | Omit = omit,
@@ -877,7 +877,7 @@ def create(
877877
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
878878
presence_penalty: Optional[float] | Omit = omit,
879879
prompt_cache_key: str | Omit = omit,
880-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
880+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
881881
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
882882
response_format: completion_create_params.ResponseFormat | Omit = omit,
883883
safety_identifier: str | Omit = omit,
@@ -1182,7 +1182,7 @@ def create(
11821182
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
11831183
presence_penalty: Optional[float] | Omit = omit,
11841184
prompt_cache_key: str | Omit = omit,
1185-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1185+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
11861186
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
11871187
response_format: completion_create_params.ResponseFormat | Omit = omit,
11881188
safety_identifier: str | Omit = omit,
@@ -1461,7 +1461,7 @@ def stream(
14611461
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
14621462
presence_penalty: Optional[float] | Omit = omit,
14631463
prompt_cache_key: str | Omit = omit,
1464-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1464+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
14651465
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
14661466
safety_identifier: str | Omit = omit,
14671467
seed: Optional[int] | Omit = omit,
@@ -1612,7 +1612,7 @@ async def parse(
16121612
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
16131613
presence_penalty: Optional[float] | Omit = omit,
16141614
prompt_cache_key: str | Omit = omit,
1615-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1615+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
16161616
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
16171617
safety_identifier: str | Omit = omit,
16181618
seed: Optional[int] | Omit = omit,
@@ -1767,7 +1767,7 @@ async def create(
17671767
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
17681768
presence_penalty: Optional[float] | Omit = omit,
17691769
prompt_cache_key: str | Omit = omit,
1770-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
1770+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
17711771
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
17721772
response_format: completion_create_params.ResponseFormat | Omit = omit,
17731773
safety_identifier: str | Omit = omit,
@@ -2074,7 +2074,7 @@ async def create(
20742074
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
20752075
presence_penalty: Optional[float] | Omit = omit,
20762076
prompt_cache_key: str | Omit = omit,
2077-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2077+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
20782078
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
20792079
response_format: completion_create_params.ResponseFormat | Omit = omit,
20802080
safety_identifier: str | Omit = omit,
@@ -2380,7 +2380,7 @@ async def create(
23802380
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
23812381
presence_penalty: Optional[float] | Omit = omit,
23822382
prompt_cache_key: str | Omit = omit,
2383-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2383+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
23842384
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
23852385
response_format: completion_create_params.ResponseFormat | Omit = omit,
23862386
safety_identifier: str | Omit = omit,
@@ -2685,7 +2685,7 @@ async def create(
26852685
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
26862686
presence_penalty: Optional[float] | Omit = omit,
26872687
prompt_cache_key: str | Omit = omit,
2688-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2688+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
26892689
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
26902690
response_format: completion_create_params.ResponseFormat | Omit = omit,
26912691
safety_identifier: str | Omit = omit,
@@ -2964,7 +2964,7 @@ def stream(
29642964
prediction: Optional[ChatCompletionPredictionContentParam] | Omit = omit,
29652965
presence_penalty: Optional[float] | Omit = omit,
29662966
prompt_cache_key: str | Omit = omit,
2967-
prompt_cache_retention: Optional[Literal["in-memory", "24h"]] | Omit = omit,
2967+
prompt_cache_retention: Optional[Literal["in_memory", "24h"]] | Omit = omit,
29682968
reasoning_effort: Optional[ReasoningEffort] | Omit = omit,
29692969
safety_identifier: str | Omit = omit,
29702970
seed: Optional[int] | Omit = omit,

0 commit comments

Comments (0)