
feat(api): OpenAPI spec update via Stainless API #256


Merged 1 commit on Jul 5, 2024
2 changes: 1 addition & 1 deletion .stats.yml
@@ -1 +1 @@
-configured_endpoints: 6
+configured_endpoints: 2
153 changes: 25 additions & 128 deletions README.md
@@ -32,26 +32,11 @@ client = Openlayer(
     api_key=os.environ.get("OPENLAYER_API_KEY"),
 )

-data_stream_response = client.inference_pipelines.data.stream(
-    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-    config={
-        "input_variable_names": ["user_query"],
-        "output_column_name": "output",
-        "num_of_token_column_name": "tokens",
-        "cost_column_name": "cost",
-        "timestamp_column_name": "timestamp",
-    },
-    rows=[
-        {
-            "user_query": "what's the meaning of life?",
-            "output": "42",
-            "tokens": 7,
-            "cost": 0.02,
-            "timestamp": 1620000000,
-        }
-    ],
+project_create_response = client.projects.create(
+    name="My Project",
+    task_type="llm-base",
 )
-print(data_stream_response.success)
+print(project_create_response.id)
 ```

 While you can provide an `api_key` keyword argument,
@@ -75,26 +60,11 @@ client = AsyncOpenlayer(


 async def main() -> None:
-    data_stream_response = await client.inference_pipelines.data.stream(
-        "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-        config={
-            "input_variable_names": ["user_query"],
-            "output_column_name": "output",
-            "num_of_token_column_name": "tokens",
-            "cost_column_name": "cost",
-            "timestamp_column_name": "timestamp",
-        },
-        rows=[
-            {
-                "user_query": "what's the meaning of life?",
-                "output": "42",
-                "tokens": 7,
-                "cost": 0.02,
-                "timestamp": 1620000000,
-            }
-        ],
+    project_create_response = await client.projects.create(
+        name="My Project",
+        task_type="llm-base",
     )
-    print(data_stream_response.success)
+    print(project_create_response.id)


 asyncio.run(main())
@@ -127,24 +97,9 @@ from openlayer import Openlayer
 client = Openlayer()

 try:
-    client.inference_pipelines.data.stream(
-        "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-        config={
-            "input_variable_names": ["user_query"],
-            "output_column_name": "output",
-            "num_of_token_column_name": "tokens",
-            "cost_column_name": "cost",
-            "timestamp_column_name": "timestamp",
-        },
-        rows=[
-            {
-                "user_query": "what's the meaning of life?",
-                "output": "42",
-                "tokens": 7,
-                "cost": 0.02,
-                "timestamp": 1620000000,
-            }
-        ],
+    client.projects.create(
+        name="My Project",
+        task_type="llm-base",
     )
 except openlayer.APIConnectionError as e:
     print("The server could not be reached")
@@ -188,24 +143,9 @@ client = Openlayer(
 )

 # Or, configure per-request:
-client.with_options(max_retries=5).inference_pipelines.data.stream(
-    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-    config={
-        "input_variable_names": ["user_query"],
-        "output_column_name": "output",
-        "num_of_token_column_name": "tokens",
-        "cost_column_name": "cost",
-        "timestamp_column_name": "timestamp",
-    },
-    rows=[
-        {
-            "user_query": "what's the meaning of life?",
-            "output": "42",
-            "tokens": 7,
-            "cost": 0.02,
-            "timestamp": 1620000000,
-        }
-    ],
+client.with_options(max_retries=5).projects.create(
+    name="My Project",
+    task_type="llm-base",
 )
 ```

@@ -229,24 +169,9 @@ client = Openlayer(
 )

 # Override per-request:
-client.with_options(timeout=5.0).inference_pipelines.data.stream(
-    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-    config={
-        "input_variable_names": ["user_query"],
-        "output_column_name": "output",
-        "num_of_token_column_name": "tokens",
-        "cost_column_name": "cost",
-        "timestamp_column_name": "timestamp",
-    },
-    rows=[
-        {
-            "user_query": "what's the meaning of life?",
-            "output": "42",
-            "tokens": 7,
-            "cost": 0.02,
-            "timestamp": 1620000000,
-        }
-    ],
+client.with_options(timeout=5.0).projects.create(
+    name="My Project",
+    task_type="llm-base",
 )
 ```

@@ -286,27 +211,14 @@ The "raw" Response object can be accessed by prefixing `.with_raw_response.` to
 from openlayer import Openlayer

 client = Openlayer()
-response = client.inference_pipelines.data.with_raw_response.stream(
-    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-    config={
-        "input_variable_names": ["user_query"],
-        "output_column_name": "output",
-        "num_of_token_column_name": "tokens",
-        "cost_column_name": "cost",
-        "timestamp_column_name": "timestamp",
-    },
-    rows=[{
-        "user_query": "what's the meaning of life?",
-        "output": "42",
-        "tokens": 7,
-        "cost": 0.02,
-        "timestamp": 1620000000,
-    }],
+response = client.projects.with_raw_response.create(
+    name="My Project",
+    task_type="llm-base",
 )
 print(response.headers.get('X-My-Header'))

-data = response.parse()  # get the object that `inference_pipelines.data.stream()` would have returned
-print(data.success)
+project = response.parse()  # get the object that `projects.create()` would have returned
+print(project.id)
 ```

 These methods return an [`APIResponse`](https://github.com/openlayer-ai/openlayer-python/tree/main/src/openlayer/_response.py) object.
@@ -320,24 +232,9 @@ The above interface eagerly reads the full response body when you make the request
 To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods.

 ```python
-with client.inference_pipelines.data.with_streaming_response.stream(
-    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-    config={
-        "input_variable_names": ["user_query"],
-        "output_column_name": "output",
-        "num_of_token_column_name": "tokens",
-        "cost_column_name": "cost",
-        "timestamp_column_name": "timestamp",
-    },
-    rows=[
-        {
-            "user_query": "what's the meaning of life?",
-            "output": "42",
-            "tokens": 7,
-            "cost": 0.02,
-            "timestamp": 1620000000,
-        }
-    ],
+with client.projects.with_streaming_response.create(
+    name="My Project",
+    task_type="llm-base",
 ) as response:
     print(response.headers.get("X-My-Header"))

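The README diff is truncated here; its last visible hunk updates only the sync streaming example. For orientation, the async client follows the same pattern. A sketch, assuming the async surface mirrors the sync one as it does elsewhere in this SDK (not part of this diff):

```python
import asyncio

from openlayer import AsyncOpenlayer

client = AsyncOpenlayer()


async def main() -> None:
    # Async counterpart of the sync streaming-response example above:
    # the context manager defers reading the response body.
    async with client.projects.with_streaming_response.create(
        name="My Project",
        task_type="llm-base",
    ) as response:
        print(response.headers.get("X-My-Header"))


asyncio.run(main())
```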
52 changes: 0 additions & 52 deletions api.md
@@ -10,55 +10,3 @@ Methods:

 - <code title="post /projects">client.projects.<a href="./src/openlayer/resources/projects/projects.py">create</a>(\*\*<a href="src/openlayer/types/project_create_params.py">params</a>) -> <a href="./src/openlayer/types/project_create_response.py">ProjectCreateResponse</a></code>
 - <code title="get /projects">client.projects.<a href="./src/openlayer/resources/projects/projects.py">list</a>(\*\*<a href="src/openlayer/types/project_list_params.py">params</a>) -> <a href="./src/openlayer/types/project_list_response.py">ProjectListResponse</a></code>
-
-## Commits
-
-Types:
-
-```python
-from openlayer.types.projects import CommitListResponse
-```
-
-Methods:
-
-- <code title="get /projects/{id}/versions">client.projects.commits.<a href="./src/openlayer/resources/projects/commits.py">list</a>(id, \*\*<a href="src/openlayer/types/projects/commit_list_params.py">params</a>) -> <a href="./src/openlayer/types/projects/commit_list_response.py">CommitListResponse</a></code>
-
-# Commits
-
-## TestResults
-
-Types:
-
-```python
-from openlayer.types.commits import TestResultListResponse
-```
-
-Methods:
-
-- <code title="get /versions/{id}/results">client.commits.test_results.<a href="./src/openlayer/resources/commits/test_results.py">list</a>(id, \*\*<a href="src/openlayer/types/commits/test_result_list_params.py">params</a>) -> <a href="./src/openlayer/types/commits/test_result_list_response.py">TestResultListResponse</a></code>
-
-# InferencePipelines
-
-## Data
-
-Types:
-
-```python
-from openlayer.types.inference_pipelines import DataStreamResponse
-```
-
-Methods:
-
-- <code title="post /inference-pipelines/{id}/data-stream">client.inference_pipelines.data.<a href="./src/openlayer/resources/inference_pipelines/data.py">stream</a>(id, \*\*<a href="src/openlayer/types/inference_pipelines/data_stream_params.py">params</a>) -> <a href="./src/openlayer/types/inference_pipelines/data_stream_response.py">DataStreamResponse</a></code>
-
-## TestResults
-
-Types:
-
-```python
-from openlayer.types.inference_pipelines import TestResultListResponse
-```
-
-Methods:
-
-- <code title="get /inference-pipelines/{id}/results">client.inference_pipelines.test_results.<a href="./src/openlayer/resources/inference_pipelines/test_results.py">list</a>(id, \*\*<a href="src/openlayer/types/inference_pipelines/test_result_list_params.py">params</a>) -> <a href="./src/openlayer/types/inference_pipelines/test_result_list_response.py">TestResultListResponse</a></code>
18 changes: 0 additions & 18 deletions src/openlayer/_client.py
@@ -48,8 +48,6 @@

 class Openlayer(SyncAPIClient):
     projects: resources.ProjectsResource
-    commits: resources.CommitsResource
-    inference_pipelines: resources.InferencePipelinesResource
     with_raw_response: OpenlayerWithRawResponse
     with_streaming_response: OpenlayerWithStreamedResponse

@@ -104,8 +102,6 @@ def __init__(
         )

         self.projects = resources.ProjectsResource(self)
-        self.commits = resources.CommitsResource(self)
-        self.inference_pipelines = resources.InferencePipelinesResource(self)
         self.with_raw_response = OpenlayerWithRawResponse(self)
         self.with_streaming_response = OpenlayerWithStreamedResponse(self)

@@ -229,8 +225,6 @@ def _make_status_error(

 class AsyncOpenlayer(AsyncAPIClient):
     projects: resources.AsyncProjectsResource
-    commits: resources.AsyncCommitsResource
-    inference_pipelines: resources.AsyncInferencePipelinesResource
     with_raw_response: AsyncOpenlayerWithRawResponse
     with_streaming_response: AsyncOpenlayerWithStreamedResponse

@@ -285,8 +279,6 @@ def __init__(
         )

         self.projects = resources.AsyncProjectsResource(self)
-        self.commits = resources.AsyncCommitsResource(self)
-        self.inference_pipelines = resources.AsyncInferencePipelinesResource(self)
         self.with_raw_response = AsyncOpenlayerWithRawResponse(self)
         self.with_streaming_response = AsyncOpenlayerWithStreamedResponse(self)

@@ -411,31 +403,21 @@ def _make_status_error(
 class OpenlayerWithRawResponse:
     def __init__(self, client: Openlayer) -> None:
         self.projects = resources.ProjectsResourceWithRawResponse(client.projects)
-        self.commits = resources.CommitsResourceWithRawResponse(client.commits)
-        self.inference_pipelines = resources.InferencePipelinesResourceWithRawResponse(client.inference_pipelines)


 class AsyncOpenlayerWithRawResponse:
     def __init__(self, client: AsyncOpenlayer) -> None:
         self.projects = resources.AsyncProjectsResourceWithRawResponse(client.projects)
-        self.commits = resources.AsyncCommitsResourceWithRawResponse(client.commits)
-        self.inference_pipelines = resources.AsyncInferencePipelinesResourceWithRawResponse(client.inference_pipelines)


 class OpenlayerWithStreamedResponse:
     def __init__(self, client: Openlayer) -> None:
         self.projects = resources.ProjectsResourceWithStreamingResponse(client.projects)
-        self.commits = resources.CommitsResourceWithStreamingResponse(client.commits)
-        self.inference_pipelines = resources.InferencePipelinesResourceWithStreamingResponse(client.inference_pipelines)


 class AsyncOpenlayerWithStreamedResponse:
     def __init__(self, client: AsyncOpenlayer) -> None:
         self.projects = resources.AsyncProjectsResourceWithStreamingResponse(client.projects)
-        self.commits = resources.AsyncCommitsResourceWithStreamingResponse(client.commits)
-        self.inference_pipelines = resources.AsyncInferencePipelinesResourceWithStreamingResponse(
-            client.inference_pipelines
-        )


 Client = Openlayer
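A practical consequence of the `_client.py` change: `commits` and `inference_pipelines` are gone from both clients, so downstream code that referenced them now fails at attribute access. An illustrative check (not part of this diff; the key is a placeholder):

```python
from openlayer import Openlayer

client = Openlayer(api_key="YOUR_API_KEY")  # placeholder key, illustration only

print(hasattr(client, "projects"))             # True
print(hasattr(client, "inference_pipelines"))  # False after this PR
```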
28 changes: 0 additions & 28 deletions src/openlayer/resources/__init__.py
@@ -1,13 +1,5 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

-from .commits import (
-    CommitsResource,
-    AsyncCommitsResource,
-    CommitsResourceWithRawResponse,
-    AsyncCommitsResourceWithRawResponse,
-    CommitsResourceWithStreamingResponse,
-    AsyncCommitsResourceWithStreamingResponse,
-)
 from .projects import (
     ProjectsResource,
     AsyncProjectsResource,
@@ -16,14 +8,6 @@
     ProjectsResourceWithStreamingResponse,
     AsyncProjectsResourceWithStreamingResponse,
 )
-from .inference_pipelines import (
-    InferencePipelinesResource,
-    AsyncInferencePipelinesResource,
-    InferencePipelinesResourceWithRawResponse,
-    AsyncInferencePipelinesResourceWithRawResponse,
-    InferencePipelinesResourceWithStreamingResponse,
-    AsyncInferencePipelinesResourceWithStreamingResponse,
-)

 __all__ = [
     "ProjectsResource",
@@ -32,16 +16,4 @@
     "AsyncProjectsResourceWithRawResponse",
     "ProjectsResourceWithStreamingResponse",
     "AsyncProjectsResourceWithStreamingResponse",
-    "CommitsResource",
-    "AsyncCommitsResource",
-    "CommitsResourceWithRawResponse",
-    "AsyncCommitsResourceWithRawResponse",
-    "CommitsResourceWithStreamingResponse",
-    "AsyncCommitsResourceWithStreamingResponse",
-    "InferencePipelinesResource",
-    "AsyncInferencePipelinesResource",
-    "InferencePipelinesResourceWithRawResponse",
-    "AsyncInferencePipelinesResourceWithRawResponse",
-    "InferencePipelinesResourceWithStreamingResponse",
-    "AsyncInferencePipelinesResourceWithStreamingResponse",
 ]
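Likewise, the trimmed `resources/__init__.py` means the commit and inference-pipeline resource classes can no longer be imported from `openlayer.resources`. Code that must run against SDK versions from both before and after this PR could guard the import; a sketch (the `None` fallback is a hypothetical convention, not something this PR prescribes):

```python
try:
    # Available only in SDK versions generated before this spec update.
    from openlayer.resources import InferencePipelinesResource
except ImportError:
    InferencePipelinesResource = None  # removed as of this PR

from openlayer.resources import ProjectsResource  # still exported
```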