@@ -1,31 +1,32 @@
 lockVersion: 2.0.0
 id: 2d5dbf5a-62be-411a-9c7b-bc7b6dc79e13
 management:
-  docChecksum: 6657dd3e876a909472f364dc1fe34d72
+  docChecksum: eea382efbb6545b93db6c5b075e96409
   docVersion: 0.0.0
-  speakeasyVersion: 1.480.0
-  generationVersion: 2.499.0
-  releaseVersion: 0.10.0
-  configChecksum: 2b2d2389ee9ff1be5cd354baf7c62739
+  speakeasyVersion: 1.514.1
+  generationVersion: 2.546.3
+  releaseVersion: 0.11.0
+  configChecksum: 7dc5d91a976db7fc235765e2b9a611a9
   repoURL: https://github.com/livepeer/livepeer-ai-python.git
   installationURL: https://github.com/livepeer/livepeer-ai-python.git
   published: true
 features:
   python:
     additionalDependencies: 1.0.0
     constsAndDefaults: 1.0.5
-    core: 5.10.7
+    core: 5.12.3
     defaultEnabledRetries: 0.2.0
     envVarSecurityUsage: 0.3.2
-    globalSecurity: 3.0.2
+    flattening: 3.1.1
+    globalSecurity: 3.0.3
     globalSecurityCallbacks: 1.0.0
     globalSecurityFlattening: 1.0.0
     globalServerURLs: 3.1.0
     multipartFileContentType: 1.0.0
     nameOverrides: 3.0.1
     responseFormat: 1.0.1
     retries: 3.0.2
-    sdkHooks: 1.0.0
+    sdkHooks: 1.0.1
     unions: 3.0.4
     uploadStreams: 1.0.0
 generatedFiles:
@@ -75,6 +76,7 @@ generatedFiles:
   - docs/models/operations/genimagetoimageresponse.md
   - docs/models/operations/genimagetotextresponse.md
   - docs/models/operations/genimagetovideoresponse.md
+  - docs/models/operations/genlivevideotovideorequest.md
   - docs/models/operations/genlivevideotovideoresponse.md
   - docs/models/operations/genllmresponse.md
   - docs/models/operations/gensegmentanything2response.md
@@ -88,7 +90,7 @@ generatedFiles:
   - py.typed
   - pylintrc
   - pyproject.toml
-  - scripts/prepare-readme.py
+  - scripts/prepare_readme.py
   - scripts/publish.sh
   - src/livepeer_ai/__init__.py
   - src/livepeer_ai/_hooks/__init__.py
@@ -98,6 +100,7 @@ generatedFiles:
   - src/livepeer_ai/basesdk.py
   - src/livepeer_ai/generate.py
   - src/livepeer_ai/httpclient.py
+  - src/livepeer_ai/models/__init__.py
   - src/livepeer_ai/models/components/__init__.py
   - src/livepeer_ai/models/components/apierror.py
   - src/livepeer_ai/models/components/audioresponse.py
@@ -179,7 +182,7 @@ examples:
   genImageToImage:
     speakeasy-default-gen-image-to-image:
       requestBody:
-        multipart/form-data: {"prompt": "<value>", "image": {"": "x-file: example.file"}, "model_id": "", "loras": "", "strength": 0.8, "guidance_scale": 7.5, "image_guidance_scale": 1.5, "negative_prompt": "", "safety_check": true, "num_inference_steps": 100, "num_images_per_prompt": 1}
+        multipart/form-data: {"prompt": "<value>", "image": {}, "model_id": "", "loras": "", "strength": 0.8, "guidance_scale": 7.5, "image_guidance_scale": 1.5, "negative_prompt": "", "safety_check": true, "num_inference_steps": 100, "num_images_per_prompt": 1}
       responses:
         "200":
           application/json: {"images": [{"url": "https://selfish-operating.name/", "seed": 976514, "nsfw": false}]}
@@ -192,7 +195,7 @@ examples:
   genImageToVideo:
     speakeasy-default-gen-image-to-video:
      requestBody:
-        multipart/form-data: {"image": {"": "x-file: example.file"}, "model_id": "", "height": 576, "width": 1024, "fps": 6, "motion_bucket_id": 127, "noise_aug_strength": 0.02, "safety_check": true, "num_inference_steps": 25}
+        multipart/form-data: {"image": {}, "model_id": "", "height": 576, "width": 1024, "fps": 6, "motion_bucket_id": 127, "noise_aug_strength": 0.02, "safety_check": true, "num_inference_steps": 25}
       responses:
         "200":
           application/json: {"images": [{"url": "https://low-handover.name/", "seed": 87160, "nsfw": true}]}
@@ -205,7 +208,7 @@ examples:
   genUpscale:
     speakeasy-default-gen-upscale:
       requestBody:
-        multipart/form-data: {"prompt": "<value>", "image": {"": "x-file: example.file"}, "model_id": "", "safety_check": true, "num_inference_steps": 75}
+        multipart/form-data: {"prompt": "<value>", "image": {}, "model_id": "", "safety_check": true, "num_inference_steps": 75}
       responses:
         "200":
           application/json: {"images": [{"url": "https://bogus-typewriter.net", "seed": 311567, "nsfw": false}]}
@@ -218,7 +221,7 @@ examples:
   genAudioToText:
     speakeasy-default-gen-audio-to-text:
       requestBody:
-        multipart/form-data: {"audio": {"": "x-file: example.file"}, "model_id": "", "return_timestamps": "true"}
+        multipart/form-data: {"audio": {}, "model_id": "", "return_timestamps": "true"}
       responses:
         "200":
           application/json: {"text": "<value>", "chunks": [{"timestamp": ["<value>", "<value>"], "text": "<value>"}, {"timestamp": [], "text": "<value>"}]}
@@ -231,7 +234,7 @@ examples:
   genSegmentAnything2:
     speakeasy-default-gen-segment-anything2:
       requestBody:
-        multipart/form-data: {"image": {"": "x-file: example.file"}, "model_id": "", "multimask_output": true, "return_logits": true, "normalize_coords": true}
+        multipart/form-data: {"image": {}, "model_id": "", "multimask_output": true, "return_logits": true, "normalize_coords": true}
      responses:
         "200":
           application/json: {"masks": "<value>", "scores": "<value>", "logits": "<value>"}
@@ -257,7 +260,7 @@ examples:
   genImageToText:
     speakeasy-default-gen-image-to-text:
       requestBody:
-        multipart/form-data: {"image": {"": "x-file: example.file"}, "prompt": "", "model_id": ""}
+        multipart/form-data: {"image": {}, "prompt": "", "model_id": ""}
       responses:
         "200":
           application/json: {"text": "<value>"}
@@ -270,10 +273,10 @@ examples:
   genLiveVideoToVideo:
     speakeasy-default-gen-live-video-to-video:
       requestBody:
-        application/json: {"subscribe_url": "https://soulful-lava.org/", "publish_url": "https://vain-tabletop.biz", "control_url": "", "events_url": "", "model_id": ""}
+        application/json: {"subscribe_url": "https://soulful-lava.org/", "publish_url": "https://vain-tabletop.biz", "control_url": "", "events_url": "", "model_id": "", "gateway_request_id": "", "stream_id": ""}
       responses:
         "200":
-          application/json: {"subscribe_url": "https://vain-kiss.name", "publish_url": "https://frail-duffel.com", "control_url": "", "events_url": ""}
+          application/json: {"subscribe_url": "https://vain-kiss.name", "publish_url": "https://frail-duffel.com", "control_url": "", "events_url": "", "request_id": ""}
         "400":
           application/json: {"detail": {"msg": "<value>"}}
         "422":