Skip to content

Commit 2b4eb5d

Browse files
stainless-app[bot]stainless-bot
authored and committed
feat(api): update via SDK Studio (#258)
1 parent 38ac5ff commit 2b4eb5d

39 files changed

+3555
-38
lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
configured_endpoints: 2
1+
configured_endpoints: 8

README.md

Lines changed: 128 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -32,11 +32,26 @@ client = Openlayer(
3232
api_key=os.environ.get("OPENLAYER_API_KEY"),
3333
)
3434

35-
project_create_response = client.projects.create(
36-
name="My Project",
37-
task_type="llm-base",
35+
data_stream_response = client.inference_pipelines.data.stream(
36+
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
37+
config={
38+
"input_variable_names": ["user_query"],
39+
"output_column_name": "output",
40+
"num_of_token_column_name": "tokens",
41+
"cost_column_name": "cost",
42+
"timestamp_column_name": "timestamp",
43+
},
44+
rows=[
45+
{
46+
"user_query": "what's the meaning of life?",
47+
"output": "42",
48+
"tokens": 7,
49+
"cost": 0.02,
50+
"timestamp": 1620000000,
51+
}
52+
],
3853
)
39-
print(project_create_response.id)
54+
print(data_stream_response.success)
4055
```
4156

4257
While you can provide an `api_key` keyword argument,
@@ -60,11 +75,26 @@ client = AsyncOpenlayer(
6075

6176

6277
async def main() -> None:
63-
project_create_response = await client.projects.create(
64-
name="My Project",
65-
task_type="llm-base",
78+
data_stream_response = await client.inference_pipelines.data.stream(
79+
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
80+
config={
81+
"input_variable_names": ["user_query"],
82+
"output_column_name": "output",
83+
"num_of_token_column_name": "tokens",
84+
"cost_column_name": "cost",
85+
"timestamp_column_name": "timestamp",
86+
},
87+
rows=[
88+
{
89+
"user_query": "what's the meaning of life?",
90+
"output": "42",
91+
"tokens": 7,
92+
"cost": 0.02,
93+
"timestamp": 1620000000,
94+
}
95+
],
6696
)
67-
print(project_create_response.id)
97+
print(data_stream_response.success)
6898

6999

70100
asyncio.run(main())
@@ -97,9 +127,24 @@ from openlayer import Openlayer
97127
client = Openlayer()
98128

99129
try:
100-
client.projects.create(
101-
name="My Project",
102-
task_type="llm-base",
130+
client.inference_pipelines.data.stream(
131+
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
132+
config={
133+
"input_variable_names": ["user_query"],
134+
"output_column_name": "output",
135+
"num_of_token_column_name": "tokens",
136+
"cost_column_name": "cost",
137+
"timestamp_column_name": "timestamp",
138+
},
139+
rows=[
140+
{
141+
"user_query": "what's the meaning of life?",
142+
"output": "42",
143+
"tokens": 7,
144+
"cost": 0.02,
145+
"timestamp": 1620000000,
146+
}
147+
],
103148
)
104149
except openlayer.APIConnectionError as e:
105150
print("The server could not be reached")
@@ -143,9 +188,24 @@ client = Openlayer(
143188
)
144189

145190
# Or, configure per-request:
146-
client.with_options(max_retries=5).projects.create(
147-
name="My Project",
148-
task_type="llm-base",
191+
client.with_options(max_retries=5).inference_pipelines.data.stream(
192+
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
193+
config={
194+
"input_variable_names": ["user_query"],
195+
"output_column_name": "output",
196+
"num_of_token_column_name": "tokens",
197+
"cost_column_name": "cost",
198+
"timestamp_column_name": "timestamp",
199+
},
200+
rows=[
201+
{
202+
"user_query": "what's the meaning of life?",
203+
"output": "42",
204+
"tokens": 7,
205+
"cost": 0.02,
206+
"timestamp": 1620000000,
207+
}
208+
],
149209
)
150210
```
151211

@@ -169,9 +229,24 @@ client = Openlayer(
169229
)
170230

171231
# Override per-request:
172-
client.with_options(timeout=5.0).projects.create(
173-
name="My Project",
174-
task_type="llm-base",
232+
client.with_options(timeout=5.0).inference_pipelines.data.stream(
233+
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
234+
config={
235+
"input_variable_names": ["user_query"],
236+
"output_column_name": "output",
237+
"num_of_token_column_name": "tokens",
238+
"cost_column_name": "cost",
239+
"timestamp_column_name": "timestamp",
240+
},
241+
rows=[
242+
{
243+
"user_query": "what's the meaning of life?",
244+
"output": "42",
245+
"tokens": 7,
246+
"cost": 0.02,
247+
"timestamp": 1620000000,
248+
}
249+
],
175250
)
176251
```
177252

@@ -211,14 +286,27 @@ The "raw" Response object can be accessed by prefixing `.with_raw_response.` to
211286
from openlayer import Openlayer
212287

213288
client = Openlayer()
214-
response = client.projects.with_raw_response.create(
215-
name="My Project",
216-
task_type="llm-base",
289+
response = client.inference_pipelines.data.with_raw_response.stream(
290+
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
291+
config={
292+
"input_variable_names": ["user_query"],
293+
"output_column_name": "output",
294+
"num_of_token_column_name": "tokens",
295+
"cost_column_name": "cost",
296+
"timestamp_column_name": "timestamp",
297+
},
298+
rows=[{
299+
"user_query": "what's the meaning of life?",
300+
"output": "42",
301+
"tokens": 7,
302+
"cost": 0.02,
303+
"timestamp": 1620000000,
304+
}],
217305
)
218306
print(response.headers.get('X-My-Header'))
219307

220-
project = response.parse() # get the object that `projects.create()` would have returned
221-
print(project.id)
308+
data = response.parse() # get the object that `inference_pipelines.data.stream()` would have returned
309+
print(data.success)
222310
```
223311

224312
These methods return an [`APIResponse`](https://github.com/openlayer-ai/openlayer-python/tree/main/src/openlayer/_response.py) object.
@@ -232,9 +320,24 @@ The above interface eagerly reads the full response body when you make the reque
232320
To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods.
233321

234322
```python
235-
with client.projects.with_streaming_response.create(
236-
name="My Project",
237-
task_type="llm-base",
323+
with client.inference_pipelines.data.with_streaming_response.stream(
324+
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
325+
config={
326+
"input_variable_names": ["user_query"],
327+
"output_column_name": "output",
328+
"num_of_token_column_name": "tokens",
329+
"cost_column_name": "cost",
330+
"timestamp_column_name": "timestamp",
331+
},
332+
rows=[
333+
{
334+
"user_query": "what's the meaning of life?",
335+
"output": "42",
336+
"tokens": 7,
337+
"cost": 0.02,
338+
"timestamp": 1620000000,
339+
}
340+
],
238341
) as response:
239342
print(response.headers.get("X-My-Header"))
240343

api.md

Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,3 +10,68 @@ Methods:
1010

1111
- <code title="post /projects">client.projects.<a href="./src/openlayer/resources/projects/projects.py">create</a>(\*\*<a href="src/openlayer/types/project_create_params.py">params</a>) -> <a href="./src/openlayer/types/project_create_response.py">ProjectCreateResponse</a></code>
1212
- <code title="get /projects">client.projects.<a href="./src/openlayer/resources/projects/projects.py">list</a>(\*\*<a href="src/openlayer/types/project_list_params.py">params</a>) -> <a href="./src/openlayer/types/project_list_response.py">ProjectListResponse</a></code>
13+
14+
## Commits
15+
16+
Types:
17+
18+
```python
19+
from openlayer.types.projects import CommitListResponse
20+
```
21+
22+
Methods:
23+
24+
- <code title="get /projects/{projectId}/versions">client.projects.commits.<a href="./src/openlayer/resources/projects/commits.py">list</a>(project_id, \*\*<a href="src/openlayer/types/projects/commit_list_params.py">params</a>) -> <a href="./src/openlayer/types/projects/commit_list_response.py">CommitListResponse</a></code>
25+
26+
## InferencePipelines
27+
28+
Types:
29+
30+
```python
31+
from openlayer.types.projects import InferencePipelineCreateResponse, InferencePipelineListResponse
32+
```
33+
34+
Methods:
35+
36+
- <code title="post /projects/{projectId}/inference-pipelines">client.projects.inference_pipelines.<a href="./src/openlayer/resources/projects/inference_pipelines.py">create</a>(project_id, \*\*<a href="src/openlayer/types/projects/inference_pipeline_create_params.py">params</a>) -> <a href="./src/openlayer/types/projects/inference_pipeline_create_response.py">InferencePipelineCreateResponse</a></code>
37+
- <code title="get /projects/{projectId}/inference-pipelines">client.projects.inference_pipelines.<a href="./src/openlayer/resources/projects/inference_pipelines.py">list</a>(project_id, \*\*<a href="src/openlayer/types/projects/inference_pipeline_list_params.py">params</a>) -> <a href="./src/openlayer/types/projects/inference_pipeline_list_response.py">InferencePipelineListResponse</a></code>
38+
39+
# Commits
40+
41+
## TestResults
42+
43+
Types:
44+
45+
```python
46+
from openlayer.types.commits import TestResultListResponse
47+
```
48+
49+
Methods:
50+
51+
- <code title="get /versions/{projectVersionId}/results">client.commits.test_results.<a href="./src/openlayer/resources/commits/test_results.py">list</a>(project_version_id, \*\*<a href="src/openlayer/types/commits/test_result_list_params.py">params</a>) -> <a href="./src/openlayer/types/commits/test_result_list_response.py">TestResultListResponse</a></code>
52+
53+
# InferencePipelines
54+
55+
## Data
56+
57+
Types:
58+
59+
```python
60+
from openlayer.types.inference_pipelines import DataStreamResponse
61+
```
62+
63+
Methods:
64+
65+
- <code title="post /inference-pipelines/{inferencePipelineId}/data-stream">client.inference_pipelines.data.<a href="./src/openlayer/resources/inference_pipelines/data.py">stream</a>(inference_pipeline_id, \*\*<a href="src/openlayer/types/inference_pipelines/data_stream_params.py">params</a>) -> <a href="./src/openlayer/types/inference_pipelines/data_stream_response.py">DataStreamResponse</a></code>
66+
67+
## TestResults
68+
69+
Types:
70+
71+
```python
72+
from openlayer.types.inference_pipelines import TestResultListResponse
73+
```
74+
75+
Methods:
76+
77+
- <code title="get /inference-pipelines/{inferencePipelineId}/results">client.inference_pipelines.test_results.<a href="./src/openlayer/resources/inference_pipelines/test_results.py">list</a>(inference_pipeline_id, \*\*<a href="src/openlayer/types/inference_pipelines/test_result_list_params.py">params</a>) -> <a href="./src/openlayer/types/inference_pipelines/test_result_list_response.py">TestResultListResponse</a></code>

src/openlayer/_client.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,8 @@
4848

4949
class Openlayer(SyncAPIClient):
5050
projects: resources.ProjectsResource
51+
commits: resources.CommitsResource
52+
inference_pipelines: resources.InferencePipelinesResource
5153
with_raw_response: OpenlayerWithRawResponse
5254
with_streaming_response: OpenlayerWithStreamedResponse
5355

@@ -102,6 +104,8 @@ def __init__(
102104
)
103105

104106
self.projects = resources.ProjectsResource(self)
107+
self.commits = resources.CommitsResource(self)
108+
self.inference_pipelines = resources.InferencePipelinesResource(self)
105109
self.with_raw_response = OpenlayerWithRawResponse(self)
106110
self.with_streaming_response = OpenlayerWithStreamedResponse(self)
107111

@@ -225,6 +229,8 @@ def _make_status_error(
225229

226230
class AsyncOpenlayer(AsyncAPIClient):
227231
projects: resources.AsyncProjectsResource
232+
commits: resources.AsyncCommitsResource
233+
inference_pipelines: resources.AsyncInferencePipelinesResource
228234
with_raw_response: AsyncOpenlayerWithRawResponse
229235
with_streaming_response: AsyncOpenlayerWithStreamedResponse
230236

@@ -279,6 +285,8 @@ def __init__(
279285
)
280286

281287
self.projects = resources.AsyncProjectsResource(self)
288+
self.commits = resources.AsyncCommitsResource(self)
289+
self.inference_pipelines = resources.AsyncInferencePipelinesResource(self)
282290
self.with_raw_response = AsyncOpenlayerWithRawResponse(self)
283291
self.with_streaming_response = AsyncOpenlayerWithStreamedResponse(self)
284292

@@ -403,21 +411,31 @@ def _make_status_error(
403411
class OpenlayerWithRawResponse:
404412
def __init__(self, client: Openlayer) -> None:
405413
self.projects = resources.ProjectsResourceWithRawResponse(client.projects)
414+
self.commits = resources.CommitsResourceWithRawResponse(client.commits)
415+
self.inference_pipelines = resources.InferencePipelinesResourceWithRawResponse(client.inference_pipelines)
406416

407417

408418
class AsyncOpenlayerWithRawResponse:
409419
def __init__(self, client: AsyncOpenlayer) -> None:
410420
self.projects = resources.AsyncProjectsResourceWithRawResponse(client.projects)
421+
self.commits = resources.AsyncCommitsResourceWithRawResponse(client.commits)
422+
self.inference_pipelines = resources.AsyncInferencePipelinesResourceWithRawResponse(client.inference_pipelines)
411423

412424

413425
class OpenlayerWithStreamedResponse:
414426
def __init__(self, client: Openlayer) -> None:
415427
self.projects = resources.ProjectsResourceWithStreamingResponse(client.projects)
428+
self.commits = resources.CommitsResourceWithStreamingResponse(client.commits)
429+
self.inference_pipelines = resources.InferencePipelinesResourceWithStreamingResponse(client.inference_pipelines)
416430

417431

418432
class AsyncOpenlayerWithStreamedResponse:
419433
def __init__(self, client: AsyncOpenlayer) -> None:
420434
self.projects = resources.AsyncProjectsResourceWithStreamingResponse(client.projects)
435+
self.commits = resources.AsyncCommitsResourceWithStreamingResponse(client.commits)
436+
self.inference_pipelines = resources.AsyncInferencePipelinesResourceWithStreamingResponse(
437+
client.inference_pipelines
438+
)
421439

422440

423441
Client = Openlayer

src/openlayer/resources/__init__.py

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,13 @@
11
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
22

3+
from .commits import (
4+
CommitsResource,
5+
AsyncCommitsResource,
6+
CommitsResourceWithRawResponse,
7+
AsyncCommitsResourceWithRawResponse,
8+
CommitsResourceWithStreamingResponse,
9+
AsyncCommitsResourceWithStreamingResponse,
10+
)
311
from .projects import (
412
ProjectsResource,
513
AsyncProjectsResource,
@@ -8,6 +16,14 @@
816
ProjectsResourceWithStreamingResponse,
917
AsyncProjectsResourceWithStreamingResponse,
1018
)
19+
from .inference_pipelines import (
20+
InferencePipelinesResource,
21+
AsyncInferencePipelinesResource,
22+
InferencePipelinesResourceWithRawResponse,
23+
AsyncInferencePipelinesResourceWithRawResponse,
24+
InferencePipelinesResourceWithStreamingResponse,
25+
AsyncInferencePipelinesResourceWithStreamingResponse,
26+
)
1127

1228
__all__ = [
1329
"ProjectsResource",
@@ -16,4 +32,16 @@
1632
"AsyncProjectsResourceWithRawResponse",
1733
"ProjectsResourceWithStreamingResponse",
1834
"AsyncProjectsResourceWithStreamingResponse",
35+
"CommitsResource",
36+
"AsyncCommitsResource",
37+
"CommitsResourceWithRawResponse",
38+
"AsyncCommitsResourceWithRawResponse",
39+
"CommitsResourceWithStreamingResponse",
40+
"AsyncCommitsResourceWithStreamingResponse",
41+
"InferencePipelinesResource",
42+
"AsyncInferencePipelinesResource",
43+
"InferencePipelinesResourceWithRawResponse",
44+
"AsyncInferencePipelinesResourceWithRawResponse",
45+
"InferencePipelinesResourceWithStreamingResponse",
46+
"AsyncInferencePipelinesResourceWithStreamingResponse",
1947
]

0 commit comments

Comments
 (0)