
Commit 56403ce

Auto-generated API code
1 parent a6bb5f3 · commit 56403ce

8 files changed (+46, -14 lines)

elasticsearch/_async/client/__init__.py

Lines changed: 13 additions & 5 deletions
@@ -2300,7 +2300,9 @@ async def knn_search(
         human: t.Optional[bool] = None,
         pretty: t.Optional[bool] = None,
         routing: t.Optional[str] = None,
-        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
+        source: t.Optional[
+            t.Union[bool, t.Mapping[str, t.Any], t.Union[str, t.Sequence[str]]]
+        ] = None,
         stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
@@ -3310,7 +3312,9 @@ async def search(
         collapse: t.Optional[t.Mapping[str, t.Any]] = None,
         default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None,
         df: t.Optional[str] = None,
-        docvalue_fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None,
+        docvalue_fields: t.Optional[
+            t.Sequence[t.Union[str, t.Mapping[str, t.Any]]]
+        ] = None,
         error_trace: t.Optional[bool] = None,
         expand_wildcards: t.Optional[
             t.Union[
@@ -3322,7 +3326,7 @@ async def search(
         ] = None,
         explain: t.Optional[bool] = None,
         ext: t.Optional[t.Mapping[str, t.Any]] = None,
-        fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None,
+        fields: t.Optional[t.Sequence[t.Union[str, t.Mapping[str, t.Any]]]] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         from_: t.Optional[int] = None,
         highlight: t.Optional[t.Mapping[str, t.Any]] = None,
@@ -3370,7 +3374,9 @@ async def search(
                 t.Union[str, t.Mapping[str, t.Any]],
             ]
         ] = None,
-        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
+        source: t.Optional[
+            t.Union[bool, t.Mapping[str, t.Any], t.Union[str, t.Sequence[str]]]
+        ] = None,
         source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         stats: t.Optional[t.Sequence[str]] = None,
@@ -4326,7 +4332,9 @@ async def update(
         routing: t.Optional[str] = None,
         script: t.Optional[t.Mapping[str, t.Any]] = None,
         scripted_upsert: t.Optional[bool] = None,
-        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
+        source: t.Optional[
+            t.Union[bool, t.Mapping[str, t.Any], t.Union[str, t.Sequence[str]]]
+        ] = None,
         source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
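
The widened annotations above let `source` be passed as a field pattern or a list of patterns (not only a bool or a mapping), and let `fields` / `docvalue_fields` accept plain field names alongside mappings. A minimal sketch of the newly-typed call styles, assuming a local cluster at http://localhost:9200 and an illustrative index named "my-index":

import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    client = AsyncElasticsearch("http://localhost:9200")  # assumed local cluster

    resp = await client.search(
        index="my-index",              # illustrative index name
        query={"match_all": {}},
        source=["title", "author.*"],  # str | Sequence[str] now type-checks
        fields=[                       # plain strings mixed with mappings
            "title",
            {"field": "created", "format": "date_time"},
        ],
        docvalue_fields=["created"],   # plain strings now type-check
    )
    print(resp["hits"]["total"])

    await client.close()


asyncio.run(main())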

elasticsearch/_async/client/async_search.py

Lines changed: 3 additions & 1 deletion
@@ -239,7 +239,9 @@ async def submit(
                 t.Union[str, t.Mapping[str, t.Any]],
             ]
         ] = None,
-        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
+        source: t.Optional[
+            t.Union[bool, t.Mapping[str, t.Any], t.Union[str, t.Sequence[str]]]
+        ] = None,
         source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         stats: t.Optional[t.Sequence[str]] = None,

elasticsearch/_async/client/fleet.py

Lines changed: 3 additions & 1 deletion
@@ -293,7 +293,9 @@ async def search(
                 t.Union[str, t.Mapping[str, t.Any]],
             ]
         ] = None,
-        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
+        source: t.Optional[
+            t.Union[bool, t.Mapping[str, t.Any], t.Union[str, t.Sequence[str]]]
+        ] = None,
         source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         stats: t.Optional[t.Sequence[str]] = None,

elasticsearch/_async/client/ml.py

Lines changed: 4 additions & 0 deletions
@@ -3582,6 +3582,7 @@ async def start_trained_model_deployment(
         *,
         model_id: str,
         cache_size: t.Optional[t.Union[int, str]] = None,
+        deployment_id: t.Optional[str] = None,
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
@@ -3605,6 +3606,7 @@ async def start_trained_model_deployment(
         :param cache_size: The inference cache size (in memory outside the JVM heap)
             per node for the model. The default value is the same size as the `model_size_bytes`.
             To disable the cache, `0b` can be provided.
+        :param deployment_id: A unique identifier for the deployment of the model.
         :param number_of_allocations: The number of model allocations on each node where
             the model is deployed. All allocations on a node share the same copy of the
             model in memory but use a separate set of threads to evaluate the model.
@@ -3631,6 +3633,8 @@ async def start_trained_model_deployment(
         __query: t.Dict[str, t.Any] = {}
         if cache_size is not None:
             __query["cache_size"] = cache_size
+        if deployment_id is not None:
+            __query["deployment_id"] = deployment_id
         if error_trace is not None:
             __query["error_trace"] = error_trace
         if filter_path is not None:
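
The new `deployment_id` keyword is forwarded as a query parameter, mirroring the generated `__query["deployment_id"] = deployment_id` handling above. A minimal sketch of starting a deployment under an explicit id, with illustrative model and deployment ids:

import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    client = AsyncElasticsearch("http://localhost:9200")  # assumed local cluster

    resp = await client.ml.start_trained_model_deployment(
        model_id=".elser_model_2",     # illustrative trained model id
        deployment_id="elser-search",  # newly supported keyword
        number_of_allocations=1,
        threads_per_allocation=1,
        wait_for="started",
    )
    print(resp)  # deployment assignment details

    await client.close()


asyncio.run(main())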

elasticsearch/_sync/client/__init__.py

Lines changed: 13 additions & 5 deletions
@@ -2298,7 +2298,9 @@ def knn_search(
         human: t.Optional[bool] = None,
         pretty: t.Optional[bool] = None,
         routing: t.Optional[str] = None,
-        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
+        source: t.Optional[
+            t.Union[bool, t.Mapping[str, t.Any], t.Union[str, t.Sequence[str]]]
+        ] = None,
         stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
@@ -3308,7 +3310,9 @@ def search(
         collapse: t.Optional[t.Mapping[str, t.Any]] = None,
         default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None,
         df: t.Optional[str] = None,
-        docvalue_fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None,
+        docvalue_fields: t.Optional[
+            t.Sequence[t.Union[str, t.Mapping[str, t.Any]]]
+        ] = None,
         error_trace: t.Optional[bool] = None,
         expand_wildcards: t.Optional[
             t.Union[
@@ -3320,7 +3324,7 @@ def search(
         ] = None,
         explain: t.Optional[bool] = None,
         ext: t.Optional[t.Mapping[str, t.Any]] = None,
-        fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None,
+        fields: t.Optional[t.Sequence[t.Union[str, t.Mapping[str, t.Any]]]] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         from_: t.Optional[int] = None,
         highlight: t.Optional[t.Mapping[str, t.Any]] = None,
@@ -3368,7 +3372,9 @@ def search(
                 t.Union[str, t.Mapping[str, t.Any]],
             ]
         ] = None,
-        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
+        source: t.Optional[
+            t.Union[bool, t.Mapping[str, t.Any], t.Union[str, t.Sequence[str]]]
+        ] = None,
         source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         stats: t.Optional[t.Sequence[str]] = None,
@@ -4324,7 +4330,9 @@ def update(
         routing: t.Optional[str] = None,
         script: t.Optional[t.Mapping[str, t.Any]] = None,
         scripted_upsert: t.Optional[bool] = None,
-        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
+        source: t.Optional[
+            t.Union[bool, t.Mapping[str, t.Any], t.Union[str, t.Sequence[str]]]
+        ] = None,
         source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
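
The synchronous client mirrors the async annotations above. For instance, `update` can now request a filtered `_source` in its response by passing field names directly; a minimal sketch with illustrative index, document id, and field names:

from elasticsearch import Elasticsearch

client = Elasticsearch("http://localhost:9200")  # assumed local cluster

resp = client.update(
    index="my-index",           # illustrative index name
    id="1",                     # illustrative document id
    doc={"views": 42},          # partial document update
    source=["views", "title"],  # str | Sequence[str] now type-checks
)
print(resp["result"])           # e.g. "updated" or "noop"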

elasticsearch/_sync/client/async_search.py

Lines changed: 3 additions & 1 deletion
@@ -239,7 +239,9 @@ def submit(
                 t.Union[str, t.Mapping[str, t.Any]],
             ]
         ] = None,
-        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
+        source: t.Optional[
+            t.Union[bool, t.Mapping[str, t.Any], t.Union[str, t.Sequence[str]]]
+        ] = None,
         source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         stats: t.Optional[t.Sequence[str]] = None,

elasticsearch/_sync/client/fleet.py

Lines changed: 3 additions & 1 deletion
@@ -293,7 +293,9 @@ def search(
                 t.Union[str, t.Mapping[str, t.Any]],
             ]
         ] = None,
-        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
+        source: t.Optional[
+            t.Union[bool, t.Mapping[str, t.Any], t.Union[str, t.Sequence[str]]]
+        ] = None,
         source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         stats: t.Optional[t.Sequence[str]] = None,

elasticsearch/_sync/client/ml.py

Lines changed: 4 additions & 0 deletions
@@ -3582,6 +3582,7 @@ def start_trained_model_deployment(
         *,
         model_id: str,
         cache_size: t.Optional[t.Union[int, str]] = None,
+        deployment_id: t.Optional[str] = None,
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
@@ -3605,6 +3606,7 @@ def start_trained_model_deployment(
         :param cache_size: The inference cache size (in memory outside the JVM heap)
             per node for the model. The default value is the same size as the `model_size_bytes`.
             To disable the cache, `0b` can be provided.
+        :param deployment_id: A unique identifier for the deployment of the model.
         :param number_of_allocations: The number of model allocations on each node where
             the model is deployed. All allocations on a node share the same copy of the
             model in memory but use a separate set of threads to evaluate the model.
@@ -3631,6 +3633,8 @@ def start_trained_model_deployment(
         __query: t.Dict[str, t.Any] = {}
         if cache_size is not None:
             __query["cache_size"] = cache_size
+        if deployment_id is not None:
+            __query["deployment_id"] = deployment_id
         if error_trace is not None:
             __query["error_trace"] = error_trace
         if filter_path is not None:
