
Commit a02f039

Auto-generated code for main (#2473)
Co-authored-by: Quentin Pradet <quentin.pradet@elastic.co>
1 parent ce50d9e commit a02f039

2 files changed: +10 −0 lines changed


elasticsearch/_async/client/ml.py

Lines changed: 5 additions & 0 deletions
@@ -3627,6 +3627,7 @@ async def put_job(
             "model_size_bytes",
             "model_type",
             "platform_architecture",
+            "prefix_strings",
             "tags",
         ),
     )
@@ -3649,6 +3650,7 @@ async def put_trained_model(
             t.Union["t.Literal['lang_ident', 'pytorch', 'tree_ensemble']", str]
         ] = None,
         platform_architecture: t.Optional[str] = None,
+        prefix_strings: t.Optional[t.Mapping[str, t.Any]] = None,
         pretty: t.Optional[bool] = None,
         tags: t.Optional[t.Sequence[str]] = None,
         body: t.Optional[t.Dict[str, t.Any]] = None,
@@ -3686,6 +3688,7 @@ async def put_trained_model(
             `darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. For portable models
             (those that work independent of processor architecture or OS features), leave
             this field unset.
+        :param prefix_strings: Optional prefix strings applied at inference
         :param tags: An array of tags to organize the model.
         """
         if model_id in SKIP_IN_PATH:
@@ -3723,6 +3726,8 @@ async def put_trained_model(
             __body["model_type"] = model_type
         if platform_architecture is not None:
             __body["platform_architecture"] = platform_architecture
+        if prefix_strings is not None:
+            __body["prefix_strings"] = prefix_strings
         if tags is not None:
             __body["tags"] = tags
         __headers = {"accept": "application/json", "content-type": "application/json"}
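
The change above threads a new prefix_strings argument from the put_trained_model signature into the request body. A minimal usage sketch for the async client follows; the model id and the "ingest"/"search" keys inside prefix_strings are illustrative assumptions, not taken from this commit.

import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    # Hypothetical local cluster address.
    client = AsyncElasticsearch("http://localhost:9200")

    # prefix_strings is forwarded verbatim as the "prefix_strings" field of the
    # PUT _ml/trained_models/<model_id> request body (see the diff above).
    await client.ml.put_trained_model(
        model_id="my-text-embedding-model",  # hypothetical model id
        model_type="pytorch",
        prefix_strings={
            "ingest": "passage: ",  # assumed key: prefix applied when indexing
            "search": "query: ",    # assumed key: prefix applied when querying
        },
    )

    await client.close()


asyncio.run(main())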

elasticsearch/_sync/client/ml.py

Lines changed: 5 additions & 0 deletions
@@ -3627,6 +3627,7 @@ def put_job(
             "model_size_bytes",
             "model_type",
             "platform_architecture",
+            "prefix_strings",
             "tags",
         ),
     )
@@ -3649,6 +3650,7 @@ def put_trained_model(
             t.Union["t.Literal['lang_ident', 'pytorch', 'tree_ensemble']", str]
         ] = None,
         platform_architecture: t.Optional[str] = None,
+        prefix_strings: t.Optional[t.Mapping[str, t.Any]] = None,
        pretty: t.Optional[bool] = None,
         tags: t.Optional[t.Sequence[str]] = None,
         body: t.Optional[t.Dict[str, t.Any]] = None,
@@ -3686,6 +3688,7 @@ def put_trained_model(
             `darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. For portable models
             (those that work independent of processor architecture or OS features), leave
             this field unset.
+        :param prefix_strings: Optional prefix strings applied at inference
         :param tags: An array of tags to organize the model.
         """
         if model_id in SKIP_IN_PATH:
@@ -3723,6 +3726,8 @@ def put_trained_model(
             __body["model_type"] = model_type
         if platform_architecture is not None:
             __body["platform_architecture"] = platform_architecture
+        if prefix_strings is not None:
+            __body["prefix_strings"] = prefix_strings
         if tags is not None:
             __body["tags"] = tags
         __headers = {"accept": "application/json", "content-type": "application/json"}
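
The synchronous client receives the identical change, so the blocking call mirrors the async one. Another brief sketch, again with a hypothetical model id and assumed prefix keys:

from elasticsearch import Elasticsearch

client = Elasticsearch("http://localhost:9200")  # hypothetical local cluster

client.ml.put_trained_model(
    model_id="my-text-embedding-model",  # hypothetical model id
    model_type="pytorch",
    prefix_strings={"ingest": "passage: ", "search": "query: "},  # assumed keys
)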
