Skip to content

Commit 93af129

Browse files
chore(internal): codegen related update
1 parent 15afa21 commit 93af129

10 files changed

Lines changed: 82 additions & 78 deletions

File tree

.github/workflows/ci.yml

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -61,14 +61,18 @@ jobs:
6161
run: rye build
6262

6363
- name: Get GitHub OIDC Token
64-
if: github.repository == 'stainless-sdks/openai-python'
64+
if: |-
65+
github.repository == 'stainless-sdks/openai-python' &&
66+
!startsWith(github.ref, 'refs/heads/stl/')
6567
id: github-oidc
6668
uses: actions/github-script@v8
6769
with:
6870
script: core.setOutput('github_token', await core.getIDToken());
6971

7072
- name: Upload tarball
71-
if: github.repository == 'stainless-sdks/openai-python'
73+
if: |-
74+
github.repository == 'stainless-sdks/openai-python' &&
75+
!startsWith(github.ref, 'refs/heads/stl/')
7276
env:
7377
URL: https://pkg.stainless.com/s
7478
AUTH: ${{ steps.github-oidc.outputs.github_token }}

tests/api_resources/audio/test_transcriptions.py

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -20,15 +20,15 @@ class TestTranscriptions:
2020
@parametrize
2121
def test_method_create_overload_1(self, client: OpenAI) -> None:
2222
transcription = client.audio.transcriptions.create(
23-
file=b"raw file contents",
23+
file=b"Example data",
2424
model="gpt-4o-transcribe",
2525
)
2626
assert_matches_type(TranscriptionCreateResponse, transcription, path=["response"])
2727

2828
@parametrize
2929
def test_method_create_with_all_params_overload_1(self, client: OpenAI) -> None:
3030
transcription = client.audio.transcriptions.create(
31-
file=b"raw file contents",
31+
file=b"Example data",
3232
model="gpt-4o-transcribe",
3333
chunking_strategy="auto",
3434
include=["logprobs"],
@@ -46,7 +46,7 @@ def test_method_create_with_all_params_overload_1(self, client: OpenAI) -> None:
4646
@parametrize
4747
def test_raw_response_create_overload_1(self, client: OpenAI) -> None:
4848
response = client.audio.transcriptions.with_raw_response.create(
49-
file=b"raw file contents",
49+
file=b"Example data",
5050
model="gpt-4o-transcribe",
5151
)
5252

@@ -58,7 +58,7 @@ def test_raw_response_create_overload_1(self, client: OpenAI) -> None:
5858
@parametrize
5959
def test_streaming_response_create_overload_1(self, client: OpenAI) -> None:
6060
with client.audio.transcriptions.with_streaming_response.create(
61-
file=b"raw file contents",
61+
file=b"Example data",
6262
model="gpt-4o-transcribe",
6363
) as response:
6464
assert not response.is_closed
@@ -72,7 +72,7 @@ def test_streaming_response_create_overload_1(self, client: OpenAI) -> None:
7272
@parametrize
7373
def test_method_create_overload_2(self, client: OpenAI) -> None:
7474
transcription_stream = client.audio.transcriptions.create(
75-
file=b"raw file contents",
75+
file=b"Example data",
7676
model="gpt-4o-transcribe",
7777
stream=True,
7878
)
@@ -81,7 +81,7 @@ def test_method_create_overload_2(self, client: OpenAI) -> None:
8181
@parametrize
8282
def test_method_create_with_all_params_overload_2(self, client: OpenAI) -> None:
8383
transcription_stream = client.audio.transcriptions.create(
84-
file=b"raw file contents",
84+
file=b"Example data",
8585
model="gpt-4o-transcribe",
8686
stream=True,
8787
chunking_strategy="auto",
@@ -99,7 +99,7 @@ def test_method_create_with_all_params_overload_2(self, client: OpenAI) -> None:
9999
@parametrize
100100
def test_raw_response_create_overload_2(self, client: OpenAI) -> None:
101101
response = client.audio.transcriptions.with_raw_response.create(
102-
file=b"raw file contents",
102+
file=b"Example data",
103103
model="gpt-4o-transcribe",
104104
stream=True,
105105
)
@@ -111,7 +111,7 @@ def test_raw_response_create_overload_2(self, client: OpenAI) -> None:
111111
@parametrize
112112
def test_streaming_response_create_overload_2(self, client: OpenAI) -> None:
113113
with client.audio.transcriptions.with_streaming_response.create(
114-
file=b"raw file contents",
114+
file=b"Example data",
115115
model="gpt-4o-transcribe",
116116
stream=True,
117117
) as response:
@@ -132,15 +132,15 @@ class TestAsyncTranscriptions:
132132
@parametrize
133133
async def test_method_create_overload_1(self, async_client: AsyncOpenAI) -> None:
134134
transcription = await async_client.audio.transcriptions.create(
135-
file=b"raw file contents",
135+
file=b"Example data",
136136
model="gpt-4o-transcribe",
137137
)
138138
assert_matches_type(TranscriptionCreateResponse, transcription, path=["response"])
139139

140140
@parametrize
141141
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncOpenAI) -> None:
142142
transcription = await async_client.audio.transcriptions.create(
143-
file=b"raw file contents",
143+
file=b"Example data",
144144
model="gpt-4o-transcribe",
145145
chunking_strategy="auto",
146146
include=["logprobs"],
@@ -158,7 +158,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
158158
@parametrize
159159
async def test_raw_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
160160
response = await async_client.audio.transcriptions.with_raw_response.create(
161-
file=b"raw file contents",
161+
file=b"Example data",
162162
model="gpt-4o-transcribe",
163163
)
164164

@@ -170,7 +170,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncOpenAI) -
170170
@parametrize
171171
async def test_streaming_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
172172
async with async_client.audio.transcriptions.with_streaming_response.create(
173-
file=b"raw file contents",
173+
file=b"Example data",
174174
model="gpt-4o-transcribe",
175175
) as response:
176176
assert not response.is_closed
@@ -184,7 +184,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncOpe
184184
@parametrize
185185
async def test_method_create_overload_2(self, async_client: AsyncOpenAI) -> None:
186186
transcription_stream = await async_client.audio.transcriptions.create(
187-
file=b"raw file contents",
187+
file=b"Example data",
188188
model="gpt-4o-transcribe",
189189
stream=True,
190190
)
@@ -193,7 +193,7 @@ async def test_method_create_overload_2(self, async_client: AsyncOpenAI) -> None
193193
@parametrize
194194
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncOpenAI) -> None:
195195
transcription_stream = await async_client.audio.transcriptions.create(
196-
file=b"raw file contents",
196+
file=b"Example data",
197197
model="gpt-4o-transcribe",
198198
stream=True,
199199
chunking_strategy="auto",
@@ -211,7 +211,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
211211
@parametrize
212212
async def test_raw_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
213213
response = await async_client.audio.transcriptions.with_raw_response.create(
214-
file=b"raw file contents",
214+
file=b"Example data",
215215
model="gpt-4o-transcribe",
216216
stream=True,
217217
)
@@ -223,7 +223,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncOpenAI) -
223223
@parametrize
224224
async def test_streaming_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
225225
async with async_client.audio.transcriptions.with_streaming_response.create(
226-
file=b"raw file contents",
226+
file=b"Example data",
227227
model="gpt-4o-transcribe",
228228
stream=True,
229229
) as response:

tests/api_resources/audio/test_translations.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -20,15 +20,15 @@ class TestTranslations:
2020
@parametrize
2121
def test_method_create(self, client: OpenAI) -> None:
2222
translation = client.audio.translations.create(
23-
file=b"raw file contents",
23+
file=b"Example data",
2424
model="whisper-1",
2525
)
2626
assert_matches_type(TranslationCreateResponse, translation, path=["response"])
2727

2828
@parametrize
2929
def test_method_create_with_all_params(self, client: OpenAI) -> None:
3030
translation = client.audio.translations.create(
31-
file=b"raw file contents",
31+
file=b"Example data",
3232
model="whisper-1",
3333
prompt="prompt",
3434
response_format="json",
@@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: OpenAI) -> None:
3939
@parametrize
4040
def test_raw_response_create(self, client: OpenAI) -> None:
4141
response = client.audio.translations.with_raw_response.create(
42-
file=b"raw file contents",
42+
file=b"Example data",
4343
model="whisper-1",
4444
)
4545

@@ -51,7 +51,7 @@ def test_raw_response_create(self, client: OpenAI) -> None:
5151
@parametrize
5252
def test_streaming_response_create(self, client: OpenAI) -> None:
5353
with client.audio.translations.with_streaming_response.create(
54-
file=b"raw file contents",
54+
file=b"Example data",
5555
model="whisper-1",
5656
) as response:
5757
assert not response.is_closed
@@ -71,15 +71,15 @@ class TestAsyncTranslations:
7171
@parametrize
7272
async def test_method_create(self, async_client: AsyncOpenAI) -> None:
7373
translation = await async_client.audio.translations.create(
74-
file=b"raw file contents",
74+
file=b"Example data",
7575
model="whisper-1",
7676
)
7777
assert_matches_type(TranslationCreateResponse, translation, path=["response"])
7878

7979
@parametrize
8080
async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
8181
translation = await async_client.audio.translations.create(
82-
file=b"raw file contents",
82+
file=b"Example data",
8383
model="whisper-1",
8484
prompt="prompt",
8585
response_format="json",
@@ -90,7 +90,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) ->
9090
@parametrize
9191
async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
9292
response = await async_client.audio.translations.with_raw_response.create(
93-
file=b"raw file contents",
93+
file=b"Example data",
9494
model="whisper-1",
9595
)
9696

@@ -102,7 +102,7 @@ async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
102102
@parametrize
103103
async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
104104
async with async_client.audio.translations.with_streaming_response.create(
105-
file=b"raw file contents",
105+
file=b"Example data",
106106
model="whisper-1",
107107
) as response:
108108
assert not response.is_closed

tests/api_resources/containers/test_files.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ def test_method_create(self, client: OpenAI) -> None:
3333
def test_method_create_with_all_params(self, client: OpenAI) -> None:
3434
file = client.containers.files.create(
3535
container_id="container_id",
36-
file=b"raw file contents",
36+
file=b"Example data",
3737
file_id="file_id",
3838
)
3939
assert_matches_type(FileCreateResponse, file, path=["response"])
@@ -230,7 +230,7 @@ async def test_method_create(self, async_client: AsyncOpenAI) -> None:
230230
async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
231231
file = await async_client.containers.files.create(
232232
container_id="container_id",
233-
file=b"raw file contents",
233+
file=b"Example data",
234234
file_id="file_id",
235235
)
236236
assert_matches_type(FileCreateResponse, file, path=["response"])

tests/api_resources/skills/test_versions.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ def test_method_create_with_all_params(self, client: OpenAI) -> None:
3030
version = client.skills.versions.create(
3131
skill_id="skill_123",
3232
default=True,
33-
files=[b"raw file contents"],
33+
files=[b"Example data"],
3434
)
3535
assert_matches_type(SkillVersion, version, path=["response"])
3636

@@ -227,7 +227,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) ->
227227
version = await async_client.skills.versions.create(
228228
skill_id="skill_123",
229229
default=True,
230-
files=[b"raw file contents"],
230+
files=[b"Example data"],
231231
)
232232
assert_matches_type(SkillVersion, version, path=["response"])
233233

tests/api_resources/test_files.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -26,15 +26,15 @@ class TestFiles:
2626
@parametrize
2727
def test_method_create(self, client: OpenAI) -> None:
2828
file = client.files.create(
29-
file=b"raw file contents",
29+
file=b"Example data",
3030
purpose="assistants",
3131
)
3232
assert_matches_type(FileObject, file, path=["response"])
3333

3434
@parametrize
3535
def test_method_create_with_all_params(self, client: OpenAI) -> None:
3636
file = client.files.create(
37-
file=b"raw file contents",
37+
file=b"Example data",
3838
purpose="assistants",
3939
expires_after={
4040
"anchor": "created_at",
@@ -46,7 +46,7 @@ def test_method_create_with_all_params(self, client: OpenAI) -> None:
4646
@parametrize
4747
def test_raw_response_create(self, client: OpenAI) -> None:
4848
response = client.files.with_raw_response.create(
49-
file=b"raw file contents",
49+
file=b"Example data",
5050
purpose="assistants",
5151
)
5252

@@ -58,7 +58,7 @@ def test_raw_response_create(self, client: OpenAI) -> None:
5858
@parametrize
5959
def test_streaming_response_create(self, client: OpenAI) -> None:
6060
with client.files.with_streaming_response.create(
61-
file=b"raw file contents",
61+
file=b"Example data",
6262
purpose="assistants",
6363
) as response:
6464
assert not response.is_closed
@@ -279,15 +279,15 @@ class TestAsyncFiles:
279279
@parametrize
280280
async def test_method_create(self, async_client: AsyncOpenAI) -> None:
281281
file = await async_client.files.create(
282-
file=b"raw file contents",
282+
file=b"Example data",
283283
purpose="assistants",
284284
)
285285
assert_matches_type(FileObject, file, path=["response"])
286286

287287
@parametrize
288288
async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
289289
file = await async_client.files.create(
290-
file=b"raw file contents",
290+
file=b"Example data",
291291
purpose="assistants",
292292
expires_after={
293293
"anchor": "created_at",
@@ -299,7 +299,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) ->
299299
@parametrize
300300
async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
301301
response = await async_client.files.with_raw_response.create(
302-
file=b"raw file contents",
302+
file=b"Example data",
303303
purpose="assistants",
304304
)
305305

@@ -311,7 +311,7 @@ async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
311311
@parametrize
312312
async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
313313
async with async_client.files.with_streaming_response.create(
314-
file=b"raw file contents",
314+
file=b"Example data",
315315
purpose="assistants",
316316
) as response:
317317
assert not response.is_closed

0 commit comments

Comments (0)