Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .stats.yml
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
configured_endpoints: 6
configured_endpoints: 7
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/luma-ai-karanganesan%2Fluma_ai-a9bc6643a804b0df5ccca77dbeeff12341befaefe288ac306044047ded323577.yml
12 changes: 12 additions & 0 deletions api.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,3 +36,15 @@ from lumaai.types import PingCheckResponse
Methods:

- <code title="get /ping">client.ping.<a href="./src/lumaai/resources/ping.py">check</a>() -> <a href="./src/lumaai/types/ping_check_response.py">PingCheckResponse</a></code>

# Credits

Types:

```python
from lumaai.types import CreditGetResponse
```

Methods:

- <code title="get /credits">client.credits.<a href="./src/lumaai/resources/credits.py">get</a>() -> <a href="./src/lumaai/types/credit_get_response.py">CreditGetResponse</a></code>
8 changes: 8 additions & 0 deletions src/lumaai/_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
class LumaAI(SyncAPIClient):
generations: resources.GenerationsResource
ping: resources.PingResource
credits: resources.CreditsResource
with_raw_response: LumaAIWithRawResponse
with_streaming_response: LumaAIWithStreamedResponse

Expand Down Expand Up @@ -107,6 +108,7 @@ def __init__(

self.generations = resources.GenerationsResource(self)
self.ping = resources.PingResource(self)
self.credits = resources.CreditsResource(self)
self.with_raw_response = LumaAIWithRawResponse(self)
self.with_streaming_response = LumaAIWithStreamedResponse(self)

Expand Down Expand Up @@ -218,6 +220,7 @@ def _make_status_error(
class AsyncLumaAI(AsyncAPIClient):
generations: resources.AsyncGenerationsResource
ping: resources.AsyncPingResource
credits: resources.AsyncCreditsResource
with_raw_response: AsyncLumaAIWithRawResponse
with_streaming_response: AsyncLumaAIWithStreamedResponse

Expand Down Expand Up @@ -277,6 +280,7 @@ def __init__(

self.generations = resources.AsyncGenerationsResource(self)
self.ping = resources.AsyncPingResource(self)
self.credits = resources.AsyncCreditsResource(self)
self.with_raw_response = AsyncLumaAIWithRawResponse(self)
self.with_streaming_response = AsyncLumaAIWithStreamedResponse(self)

Expand Down Expand Up @@ -389,24 +393,28 @@ class LumaAIWithRawResponse:
def __init__(self, client: LumaAI) -> None:
self.generations = resources.GenerationsResourceWithRawResponse(client.generations)
self.ping = resources.PingResourceWithRawResponse(client.ping)
self.credits = resources.CreditsResourceWithRawResponse(client.credits)


class AsyncLumaAIWithRawResponse:
def __init__(self, client: AsyncLumaAI) -> None:
self.generations = resources.AsyncGenerationsResourceWithRawResponse(client.generations)
self.ping = resources.AsyncPingResourceWithRawResponse(client.ping)
self.credits = resources.AsyncCreditsResourceWithRawResponse(client.credits)


class LumaAIWithStreamedResponse:
def __init__(self, client: LumaAI) -> None:
self.generations = resources.GenerationsResourceWithStreamingResponse(client.generations)
self.ping = resources.PingResourceWithStreamingResponse(client.ping)
self.credits = resources.CreditsResourceWithStreamingResponse(client.credits)


class AsyncLumaAIWithStreamedResponse:
def __init__(self, client: AsyncLumaAI) -> None:
self.generations = resources.AsyncGenerationsResourceWithStreamingResponse(client.generations)
self.ping = resources.AsyncPingResourceWithStreamingResponse(client.ping)
self.credits = resources.AsyncCreditsResourceWithStreamingResponse(client.credits)


Client = LumaAI
Expand Down
14 changes: 14 additions & 0 deletions src/lumaai/resources/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,14 @@
PingResourceWithStreamingResponse,
AsyncPingResourceWithStreamingResponse,
)
from .credits import (
CreditsResource,
AsyncCreditsResource,
CreditsResourceWithRawResponse,
AsyncCreditsResourceWithRawResponse,
CreditsResourceWithStreamingResponse,
AsyncCreditsResourceWithStreamingResponse,
)
from .generations import (
GenerationsResource,
AsyncGenerationsResource,
Expand All @@ -30,4 +38,10 @@
"AsyncPingResourceWithRawResponse",
"PingResourceWithStreamingResponse",
"AsyncPingResourceWithStreamingResponse",
"CreditsResource",
"AsyncCreditsResource",
"CreditsResourceWithRawResponse",
"AsyncCreditsResourceWithRawResponse",
"CreditsResourceWithStreamingResponse",
"AsyncCreditsResourceWithStreamingResponse",
]
135 changes: 135 additions & 0 deletions src/lumaai/resources/credits.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

import httpx

from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
from .._base_client import make_request_options
from ..types.credit_get_response import CreditGetResponse

__all__ = ["CreditsResource", "AsyncCreditsResource"]


class CreditsResource(SyncAPIResource):
    """Synchronous resource for the ``/credits`` endpoint."""

    @cached_property
    def with_raw_response(self) -> CreditsResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/lumalabs/lumaai-python#accessing-raw-response-data-eg-headers
        """
        return CreditsResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> CreditsResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/lumalabs/lumaai-python#with_streaming_response
        """
        return CreditsResourceWithStreamingResponse(self)

    def get(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> CreditGetResponse:
        """Get the credits information for the api user

        Args:
          extra_headers: Send extra headers with the request.

          extra_query: Add additional query parameters to the request.

          extra_body: Add additional JSON properties to the request.

          timeout: Override the client-level default timeout for this request, in seconds.
        """
        return self._get(
            "/credits",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=CreditGetResponse,
        )


class AsyncCreditsResource(AsyncAPIResource):
    """Asynchronous resource for the ``/credits`` endpoint."""

    @cached_property
    def with_raw_response(self) -> AsyncCreditsResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/lumalabs/lumaai-python#accessing-raw-response-data-eg-headers
        """
        return AsyncCreditsResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncCreditsResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/lumalabs/lumaai-python#with_streaming_response
        """
        return AsyncCreditsResourceWithStreamingResponse(self)

    async def get(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> CreditGetResponse:
        """Get the credits information for the api user

        Args:
          extra_headers: Send extra headers with the request.

          extra_query: Add additional query parameters to the request.

          extra_body: Add additional JSON properties to the request.

          timeout: Override the client-level default timeout for this request, in seconds.
        """
        return await self._get(
            "/credits",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=CreditGetResponse,
        )


class CreditsResourceWithRawResponse:
    # Wraps every method of ``CreditsResource`` so that calls return the raw
    # HTTP response object instead of the parsed ``CreditGetResponse``.
    def __init__(self, credits: CreditsResource) -> None:
        self._credits = credits

        self.get = to_raw_response_wrapper(
            credits.get,
        )


class AsyncCreditsResourceWithRawResponse:
    # Async counterpart of ``CreditsResourceWithRawResponse``: wraps each
    # ``AsyncCreditsResource`` method to return the raw HTTP response.
    def __init__(self, credits: AsyncCreditsResource) -> None:
        self._credits = credits

        self.get = async_to_raw_response_wrapper(
            credits.get,
        )


class CreditsResourceWithStreamingResponse:
    # Wraps every method of ``CreditsResource`` so that calls return a
    # streamed-response wrapper that does not eagerly read the body.
    def __init__(self, credits: CreditsResource) -> None:
        self._credits = credits

        self.get = to_streamed_response_wrapper(
            credits.get,
        )


class AsyncCreditsResourceWithStreamingResponse:
    # Async counterpart of ``CreditsResourceWithStreamingResponse``: wraps each
    # ``AsyncCreditsResource`` method to return a streamed-response wrapper.
    def __init__(self, credits: AsyncCreditsResource) -> None:
        self._credits = credits

        self.get = async_to_streamed_response_wrapper(
            credits.get,
        )
1 change: 1 addition & 0 deletions src/lumaai/types/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from __future__ import annotations

from .generation import Generation as Generation
from .credit_get_response import CreditGetResponse as CreditGetResponse
from .ping_check_response import PingCheckResponse as PingCheckResponse
from .generation_list_params import GenerationListParams as GenerationListParams
from .generation_create_params import GenerationCreateParams as GenerationCreateParams
Expand Down
12 changes: 12 additions & 0 deletions src/lumaai/types/credit_get_response.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.



from .._models import BaseModel

__all__ = ["CreditGetResponse"]


class CreditGetResponse(BaseModel):
    """Response body returned by ``GET /credits``."""

    credit_balance: float
    """Available credits balance in USD cents"""
72 changes: 72 additions & 0 deletions tests/api_resources/test_credits.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

import os
from typing import Any, cast

import pytest

from lumaai import LumaAI, AsyncLumaAI
from tests.utils import assert_matches_type
from lumaai.types import CreditGetResponse

base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")


class TestCredits:
    # Runs every test twice via fixture parametrization of ``client``
    # (ids "loose"/"strict" — presumably toggling strict response validation
    # on the client fixture; confirm against tests/conftest.py).
    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    def test_method_get(self, client: LumaAI) -> None:
        # Plain call path: parsed model is returned directly.
        credit = client.credits.get()
        assert_matches_type(CreditGetResponse, credit, path=["response"])

    @parametrize
    def test_raw_response_get(self, client: LumaAI) -> None:
        # Raw-response path: body is read eagerly, so the response is closed
        # before ``parse()`` is called.
        response = client.credits.with_raw_response.get()

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        credit = response.parse()
        assert_matches_type(CreditGetResponse, credit, path=["response"])

    @parametrize
    def test_streaming_response_get(self, client: LumaAI) -> None:
        # Streaming path: body is not read eagerly, so the response stays open
        # inside the context manager and closes on exit.
        with client.credits.with_streaming_response.get() as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            credit = response.parse()
            assert_matches_type(CreditGetResponse, credit, path=["response"])

        assert cast(Any, response.is_closed) is True


class TestAsyncCredits:
    # Async mirror of ``TestCredits``; parametrizes the ``async_client``
    # fixture with the same "loose"/"strict" variants.
    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    async def test_method_get(self, async_client: AsyncLumaAI) -> None:
        # Plain call path: parsed model is returned directly.
        credit = await async_client.credits.get()
        assert_matches_type(CreditGetResponse, credit, path=["response"])

    @parametrize
    async def test_raw_response_get(self, async_client: AsyncLumaAI) -> None:
        # Raw-response path: body is read eagerly, so the response is closed
        # before ``parse()`` is awaited.
        response = await async_client.credits.with_raw_response.get()

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        credit = await response.parse()
        assert_matches_type(CreditGetResponse, credit, path=["response"])

    @parametrize
    async def test_streaming_response_get(self, async_client: AsyncLumaAI) -> None:
        # Streaming path: response stays open inside the async context manager
        # and closes on exit.
        async with async_client.credits.with_streaming_response.get() as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            credit = await response.parse()
            assert_matches_type(CreditGetResponse, credit, path=["response"])

        assert cast(Any, response.is_closed) is True