 
 import httpx
 
-from . import resources, _exceptions
+from . import _exceptions
 from ._qs import Querystring
 from ._types import (
     NOT_GIVEN,

     get_async_library,
 )
 from ._version import __version__
+from .resources import ping, credits
 from ._streaming import Stream as Stream, AsyncStream as AsyncStream
 from ._exceptions import LumaAIError, APIStatusError
 from ._base_client import (
     DEFAULT_MAX_RETRIES,
     SyncAPIClient,
     AsyncAPIClient,
 )
+from .resources.generations import generations
 
-__all__ = [
-    "Timeout",
-    "Transport",
-    "ProxiesTypes",
-    "RequestOptions",
-    "resources",
-    "LumaAI",
-    "AsyncLumaAI",
-    "Client",
-    "AsyncClient",
-]
+__all__ = ["Timeout", "Transport", "ProxiesTypes", "RequestOptions", "LumaAI", "AsyncLumaAI", "Client", "AsyncClient"]
 
 
 class LumaAI(SyncAPIClient):
-    generations: resources.GenerationsResource
-    ping: resources.PingResource
-    credits: resources.CreditsResource
+    generations: generations.GenerationsResource
+    ping: ping.PingResource
+    credits: credits.CreditsResource
     with_raw_response: LumaAIWithRawResponse
     with_streaming_response: LumaAIWithStreamedResponse
 
@@ -106,9 +98,9 @@ def __init__(
             _strict_response_validation=_strict_response_validation,
         )
 
-        self.generations = resources.GenerationsResource(self)
-        self.ping = resources.PingResource(self)
-        self.credits = resources.CreditsResource(self)
+        self.generations = generations.GenerationsResource(self)
+        self.ping = ping.PingResource(self)
+        self.credits = credits.CreditsResource(self)
         self.with_raw_response = LumaAIWithRawResponse(self)
         self.with_streaming_response = LumaAIWithStreamedResponse(self)
 
@@ -218,9 +210,9 @@ def _make_status_error(
 
 
 class AsyncLumaAI(AsyncAPIClient):
-    generations: resources.AsyncGenerationsResource
-    ping: resources.AsyncPingResource
-    credits: resources.AsyncCreditsResource
+    generations: generations.AsyncGenerationsResource
+    ping: ping.AsyncPingResource
+    credits: credits.AsyncCreditsResource
     with_raw_response: AsyncLumaAIWithRawResponse
     with_streaming_response: AsyncLumaAIWithStreamedResponse
 
@@ -278,9 +270,9 @@ def __init__(
             _strict_response_validation=_strict_response_validation,
        )
 
-        self.generations = resources.AsyncGenerationsResource(self)
-        self.ping = resources.AsyncPingResource(self)
-        self.credits = resources.AsyncCreditsResource(self)
+        self.generations = generations.AsyncGenerationsResource(self)
+        self.ping = ping.AsyncPingResource(self)
+        self.credits = credits.AsyncCreditsResource(self)
         self.with_raw_response = AsyncLumaAIWithRawResponse(self)
         self.with_streaming_response = AsyncLumaAIWithStreamedResponse(self)
 
@@ -391,30 +383,30 @@ def _make_status_error(
 
 class LumaAIWithRawResponse:
     def __init__(self, client: LumaAI) -> None:
-        self.generations = resources.GenerationsResourceWithRawResponse(client.generations)
-        self.ping = resources.PingResourceWithRawResponse(client.ping)
-        self.credits = resources.CreditsResourceWithRawResponse(client.credits)
+        self.generations = generations.GenerationsResourceWithRawResponse(client.generations)
+        self.ping = ping.PingResourceWithRawResponse(client.ping)
+        self.credits = credits.CreditsResourceWithRawResponse(client.credits)
 
 
 class AsyncLumaAIWithRawResponse:
     def __init__(self, client: AsyncLumaAI) -> None:
-        self.generations = resources.AsyncGenerationsResourceWithRawResponse(client.generations)
-        self.ping = resources.AsyncPingResourceWithRawResponse(client.ping)
-        self.credits = resources.AsyncCreditsResourceWithRawResponse(client.credits)
+        self.generations = generations.AsyncGenerationsResourceWithRawResponse(client.generations)
+        self.ping = ping.AsyncPingResourceWithRawResponse(client.ping)
+        self.credits = credits.AsyncCreditsResourceWithRawResponse(client.credits)
 
 
 class LumaAIWithStreamedResponse:
     def __init__(self, client: LumaAI) -> None:
-        self.generations = resources.GenerationsResourceWithStreamingResponse(client.generations)
-        self.ping = resources.PingResourceWithStreamingResponse(client.ping)
-        self.credits = resources.CreditsResourceWithStreamingResponse(client.credits)
+        self.generations = generations.GenerationsResourceWithStreamingResponse(client.generations)
+        self.ping = ping.PingResourceWithStreamingResponse(client.ping)
+        self.credits = credits.CreditsResourceWithStreamingResponse(client.credits)
 
 
 class AsyncLumaAIWithStreamedResponse:
     def __init__(self, client: AsyncLumaAI) -> None:
-        self.generations = resources.AsyncGenerationsResourceWithStreamingResponse(client.generations)
-        self.ping = resources.AsyncPingResourceWithStreamingResponse(client.ping)
-        self.credits = resources.AsyncCreditsResourceWithStreamingResponse(client.credits)
+        self.generations = generations.AsyncGenerationsResourceWithStreamingResponse(client.generations)
+        self.ping = ping.AsyncPingResourceWithStreamingResponse(client.ping)
+        self.credits = credits.AsyncCreditsResourceWithStreamingResponse(client.credits)
 
 
 Client = LumaAI
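
For reference, this change only swaps the internal import style in _client.py (per-module resource imports instead of the resources package namespace); the client's public surface is unchanged. A minimal usage sketch under that assumption — the lumaai import path, the auth_token argument, and the environment variable name are illustrative and not shown in this diff:

import os

from lumaai import LumaAI  # Client remains an alias for LumaAI

# auth_token sourcing is an assumption for illustration; only the resource
# attributes accessed below are confirmed by the diff above.
client = LumaAI(auth_token=os.environ.get("LUMA_AUTH_TOKEN"))

client.generations        # generations.GenerationsResource, wired up in __init__
client.ping               # ping.PingResource
client.credits            # credits.CreditsResource
client.with_raw_response  # LumaAIWithRawResponse wrapping the same three resources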
|