Skip to content

release: 0.2.0-alpha.14 #280

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Jul 29, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "0.2.0-alpha.13"
".": "0.2.0-alpha.14"
}
13 changes: 13 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,19 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
### Removed
* Deprecated and removed `publish_ground_truths` method. Use `update_data` instead.

## 0.2.0-alpha.14 (2024-07-29)

Full Changelog: [v0.2.0-alpha.13...v0.2.0-alpha.14](https://github.com/openlayer-ai/openlayer-python/compare/v0.2.0-alpha.13...v0.2.0-alpha.14)

### Features

* allow `inference_pipeline_id` to be specified as a kwarg for tracing ([e2b9ace](https://github.com/openlayer-ai/openlayer-python/commit/e2b9ace1225db6630b7ab6546c542176567673ca))


### Chores

* **tests:** update prism version ([#279](https://github.com/openlayer-ai/openlayer-python/issues/279)) ([e2fe88f](https://github.com/openlayer-ai/openlayer-python/commit/e2fe88f8722769ca4e849596b78e983b82f36ac1))

## 0.2.0-alpha.13 (2024-07-23)

Full Changelog: [v0.2.0-alpha.12...v0.2.0-alpha.13](https://github.com/openlayer-ai/openlayer-python/compare/v0.2.0-alpha.12...v0.2.0-alpha.13)
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "openlayer"
version = "0.2.0-alpha.13"
version = "0.2.0-alpha.14"
description = "The official Python library for the openlayer API"
dynamic = ["readme"]
license = "Apache-2.0"
Expand Down
4 changes: 2 additions & 2 deletions scripts/mock
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}"

# Run prism mock on the given spec
if [ "$1" == "--daemon" ]; then
npm exec --package=@stoplight/prism-cli@~5.8 -- prism mock "$URL" &> .prism.log &
npm exec --package=@stainless-api/prism-cli@5.8.4 -- prism mock "$URL" &> .prism.log &

# Wait for server to come online
echo -n "Waiting for server"
Expand All @@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then

echo
else
npm exec --package=@stoplight/prism-cli@~5.8 -- prism mock "$URL"
npm exec --package=@stainless-api/prism-cli@5.8.4 -- prism mock "$URL"
fi
2 changes: 1 addition & 1 deletion src/openlayer/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "openlayer"
__version__ = "0.2.0-alpha.13" # x-release-please-version
__version__ = "0.2.0-alpha.14" # x-release-please-version
12 changes: 7 additions & 5 deletions src/openlayer/lib/tracing/tracer.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ def create_step(
inputs: Optional[Any] = None,
output: Optional[Any] = None,
metadata: Optional[Dict[str, Any]] = None,
inference_pipeline_id: Optional[str] = None,
) -> Generator[steps.Step, None, None]:
"""Starts a trace and yields a Step object."""
new_step: steps.Step = steps.step_factory(
Expand Down Expand Up @@ -99,7 +100,8 @@ def create_step(
if _publish:
try:
_client.inference_pipelines.data.stream(
id=utils.get_env_variable("OPENLAYER_INFERENCE_PIPELINE_ID"),
inference_pipeline_id=inference_pipeline_id
or utils.get_env_variable("OPENLAYER_INFERENCE_PIPELINE_ID"),
rows=[trace_data],
config=config,
)
Expand All @@ -119,7 +121,7 @@ def add_chat_completion_step_to_trace(**kwargs) -> None:


# ----------------------------- Tracing decorator ---------------------------- #
def trace(*step_args, **step_kwargs):
def trace(*step_args, inference_pipeline_id: Optional[str] = None, **step_kwargs):
"""Decorator to trace a function.

Examples
Expand Down Expand Up @@ -163,7 +165,7 @@ def decorator(func):
def wrapper(*func_args, **func_kwargs):
if step_kwargs.get("name") is None:
step_kwargs["name"] = func.__name__
with create_step(*step_args, **step_kwargs) as step:
with create_step(*step_args, inference_pipeline_id=inference_pipeline_id, **step_kwargs) as step:
output = exception = None
try:
output = func(*func_args, **func_kwargs)
Expand Down Expand Up @@ -196,7 +198,7 @@ def wrapper(*func_args, **func_kwargs):
return decorator


def trace_async(*step_args, **step_kwargs):
def trace_async(*step_args, inference_pipeline_id: Optional[str] = None, **step_kwargs):
"""Decorator to trace a function.

Examples
Expand Down Expand Up @@ -240,7 +242,7 @@ def decorator(func):
async def wrapper(*func_args, **func_kwargs):
if step_kwargs.get("name") is None:
step_kwargs["name"] = func.__name__
with create_step(*step_args, **step_kwargs) as step:
with create_step(*step_args, inference_pipeline_id=inference_pipeline_id, **step_kwargs) as step:
output = exception = None
try:
output = await func(*func_args, **func_kwargs)
Expand Down