
Commit 40ccbb3

Merge branch 'master' into type-hints
2 parents 5410616 + 7a8b145

15 files changed: +165 -48 lines changed


.ci/jobs/defaults.yml

Lines changed: 1 addition & 1 deletion

@@ -30,7 +30,7 @@
           wipe-workspace: 'True'
     triggers:
       - github
-      - timed: '@daily'
+      - timed: 'H */12 * * *'
     axes:
       - axis:
           type: slave
Lines changed: 4 additions & 4 deletions

@@ -1,12 +1,12 @@
 ---
 - job:
-    name: elastic+elasticsearch-py+7.7
-    display-name: 'elastic / elasticsearch-py # 7.7'
-    description: Testing the elasticsearch-py 7.7 branch.
+    name: elastic+elasticsearch-py+7.9
+    display-name: 'elastic / elasticsearch-py # 7.9'
+    description: Testing the elasticsearch-py 7.9 branch.
     junit_results: "*-junit.xml"
     parameters:
       - string:
           name: branch_specifier
-          default: refs/heads/7.7
+          default: refs/heads/7.9
           description: the Git branch specifier to build (<branchName>, <tagName>,
             <commitId>, etc.)

Changelog.rst

Lines changed: 12 additions & 0 deletions

@@ -3,6 +3,18 @@
 Changelog
 =========
 
+7.9.0a1 (2020-07-20)
+--------------------
+
+* Added support for ES 7.9 APIs
+* Fixed retries to not raise an error when ``sniff_on_connection_error=True``
+  and a ``TransportError`` is raised during the sniff step. Instead the
+  retry will continue or the error that triggered the retry will be raised
+  (See `#1279`_ and `#1326`_)
+
+.. _#1326: https://github.com/elastic/elasticsearch-py/pull/1326
+.. _#1279: https://github.com/elastic/elasticsearch-py/pull/1279
+
 7.8.0 (2020-06-18)
 ------------------
 
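The retry fix is easiest to see from the caller's side: with sniffing on connection failure enabled, a failed sniff no longer aborts the remaining retries. A minimal sketch, not part of this diff, using the constructor argument exercised by the new test below; host names are placeholders:

    # Sketch only; host names are placeholders.
    from elasticsearch import Elasticsearch

    es = Elasticsearch(
        ["es-node-1:9200", "es-node-2:9200"],
        sniff_on_connection_fail=True,  # re-sniff the cluster when a connection fails
        max_retries=3,                  # retries continue even if the sniff step raises TransportError
        retry_on_timeout=True,
    )
    es.info()  # a request hitting a dead node is retried on another node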

docs/index.rst

Lines changed: 1 addition & 1 deletion

@@ -23,7 +23,7 @@ the ``async`` extra:
 
     $ python -m pip install elasticsearch[async]
 
-Read more about `how to use asyncio with this project <async>`_.
+Read more about `how to use asyncio with this project <async.html>`_.
 
 
 Compatibility
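The corrected link points at the asyncio documentation; the async client itself is installed via the extra shown above. A minimal usage sketch, not part of this diff, assuming a cluster at a placeholder URL:

    # Sketch only; the host URL is a placeholder.
    import asyncio
    from elasticsearch import AsyncElasticsearch

    async def main():
        es = AsyncElasticsearch(["http://localhost:9200"])
        info = await es.info()           # basic cluster info request
        print(info["version"]["number"])
        await es.close()                 # release the underlying aiohttp session

    asyncio.run(main())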

elasticsearch/_async/client/indices.py

Lines changed: 6 additions & 5 deletions

@@ -1241,7 +1241,8 @@ async def delete_data_stream(self, name, params=None, headers=None):
         Deletes a data stream.
         `<https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html>`_
 
-        :arg name: The name of the data stream
+        :arg name: A comma-separated list of data streams to delete; use
+            `*` to delete all data streams
         """
         if name in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'name'.")
@@ -1370,8 +1371,8 @@ async def get_data_stream(self, name=None, params=None, headers=None):
         Returns data streams.
         `<https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html>`_
 
-        :arg name: The name or wildcard expression of the requested data
-            streams
+        :arg name: A comma-separated list of data streams to get; use
+            `*` to get all data streams
         """
         return await self.transport.perform_request(
             "GET", _make_path("_data_stream", name), params=params, headers=headers
@@ -1433,8 +1434,8 @@ async def add_block(self, index, block, params=None, headers=None):
         `<https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-blocks.html>`_
 
         :arg index: A comma separated list of indices to add a block to
-        :arg block: The block to add (one of read, write, read_only,
-            metadata, read_only_allow_delete)
+        :arg block: The block to add (one of read, write, read_only or
+            metadata)
         :arg allow_no_indices: Whether to ignore if a wildcard indices
             expression resolves into no concrete indices. (This includes `_all`
             string or when no indices have been specified)
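The updated docstrings describe comma-separated lists and wildcards for data streams and the reduced set of index blocks. A usage sketch against the async client, not part of this diff; index and data stream names are placeholders:

    # Sketch only; names are placeholders.
    from elasticsearch import AsyncElasticsearch

    async def housekeeping(es: AsyncElasticsearch):
        # delete several data streams at once; "*" would match all of them
        await es.indices.delete_data_stream(name="logs-app-*,metrics-app-*")
        # block must be one of: read, write, read_only, metadata
        await es.indices.add_block(index="old-index-000001", block="write")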

elasticsearch/_async/client/ml.py

Lines changed: 21 additions & 0 deletions

@@ -1529,3 +1529,24 @@ async def update_model_snapshot(
             headers=headers,
             body=body,
         )
+
+    @query_params()
+    async def update_data_frame_analytics(self, id, body, params=None, headers=None):
+        """
+        Updates certain properties of a data frame analytics job.
+        `<https://www.elastic.co/guide/en/elasticsearch/reference/master/update-dfanalytics.html>`_
+
+        :arg id: The ID of the data frame analytics to update
+        :arg body: The data frame analytics settings to update
+        """
+        for param in (id, body):
+            if param in SKIP_IN_PATH:
+                raise ValueError("Empty value passed for a required argument.")
+
+        return await self.transport.perform_request(
+            "POST",
+            _make_path("_ml", "data_frame", "analytics", id, "_update"),
+            params=params,
+            headers=headers,
+            body=body,
+        )
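A usage sketch for the newly added API, not part of this diff; it assumes an existing async client, and the job ID and body fields are placeholders:

    # Sketch only; job id and settings are placeholders.
    async def tweak_job(es):
        await es.ml.update_data_frame_analytics(
            id="my-dfa-job",
            body={"description": "updated description", "model_memory_limit": "200mb"},
        )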

elasticsearch/_async/client/xpack.py

Lines changed: 3 additions & 1 deletion

@@ -23,12 +23,14 @@ def __getattr__(self, attr_name):
         return getattr(self.client, attr_name)
 
     # AUTO-GENERATED-API-DEFINITIONS #
-    @query_params("categories")
+    @query_params("accept_enterprise", "categories")
    async def info(self, params=None, headers=None):
        """
        Retrieves information about the installed X-Pack features.
        `<https://www.elastic.co/guide/en/elasticsearch/reference/master/info-api.html>`_
 
+        :arg accept_enterprise: Supported for backwards compatibility
+            with 7.x. If this param is used it must be set to true
        :arg categories: Comma-separated list of info categories. Can be
            any of: build, license, features
        """

elasticsearch/_async/transport.py

Lines changed: 12 additions & 6 deletions

@@ -316,20 +316,26 @@ async def perform_request(self, method, url, headers=None, params=None, body=None):
                     retry = True
 
                 if retry:
-                    # only mark as dead if we are retrying
-                    self.mark_dead(connection)
+                    try:
+                        # only mark as dead if we are retrying
+                        self.mark_dead(connection)
+                    except TransportError:
+                        # If sniffing on failure, it could fail too. Catch the
+                        # exception not to interrupt the retries.
+                        pass
                     # raise exception on last retry
                     if attempt == self.max_retries:
-                        raise
+                        raise e
                 else:
-                    raise
+                    raise e
 
             else:
+                # connection didn't fail, confirm it's live status
+                self.connection_pool.mark_live(connection)
+
                 if method == "HEAD":
                     return 200 <= status < 300
 
-                # connection didn't fail, confirm it's live status
-                self.connection_pool.mark_live(connection)
                 if data:
                     data = self.deserializer.loads(data, headers.get("content-type"))
                 return data

elasticsearch/client/indices.py

Lines changed: 6 additions & 5 deletions

@@ -1239,7 +1239,8 @@ def delete_data_stream(self, name, params=None, headers=None):
         Deletes a data stream.
         `<https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html>`_
 
-        :arg name: The name of the data stream
+        :arg name: A comma-separated list of data streams to delete; use
+            `*` to delete all data streams
         """
         if name in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'name'.")
@@ -1368,8 +1369,8 @@ def get_data_stream(self, name=None, params=None, headers=None):
         Returns data streams.
         `<https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html>`_
 
-        :arg name: The name or wildcard expression of the requested data
-            streams
+        :arg name: A comma-separated list of data streams to get; use
+            `*` to get all data streams
         """
         return self.transport.perform_request(
             "GET", _make_path("_data_stream", name), params=params, headers=headers
@@ -1431,8 +1432,8 @@ def add_block(self, index, block, params=None, headers=None):
         `<https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-blocks.html>`_
 
         :arg index: A comma separated list of indices to add a block to
-        :arg block: The block to add (one of read, write, read_only,
-            metadata, read_only_allow_delete)
+        :arg block: The block to add (one of read, write, read_only or
+            metadata)
         :arg allow_no_indices: Whether to ignore if a wildcard indices
             expression resolves into no concrete indices. (This includes `_all`
             string or when no indices have been specified)

elasticsearch/client/ml.py

Lines changed: 21 additions & 0 deletions

@@ -1515,3 +1515,24 @@ def update_model_snapshot(
             headers=headers,
             body=body,
         )
+
+    @query_params()
+    def update_data_frame_analytics(self, id, body, params=None, headers=None):
+        """
+        Updates certain properties of a data frame analytics job.
+        `<https://www.elastic.co/guide/en/elasticsearch/reference/master/update-dfanalytics.html>`_
+
+        :arg id: The ID of the data frame analytics to update
+        :arg body: The data frame analytics settings to update
+        """
+        for param in (id, body):
+            if param in SKIP_IN_PATH:
+                raise ValueError("Empty value passed for a required argument.")
+
+        return self.transport.perform_request(
+            "POST",
+            _make_path("_ml", "data_frame", "analytics", id, "_update"),
+            params=params,
+            headers=headers,
+            body=body,
+        )

elasticsearch/client/xpack.py

Lines changed: 3 additions & 1 deletion

@@ -23,12 +23,14 @@ def __getattr__(self, attr_name):
         return getattr(self.client, attr_name)
 
     # AUTO-GENERATED-API-DEFINITIONS #
-    @query_params("categories")
+    @query_params("accept_enterprise", "categories")
    def info(self, params=None, headers=None):
        """
        Retrieves information about the installed X-Pack features.
        `<https://www.elastic.co/guide/en/elasticsearch/reference/master/info-api.html>`_
 
+        :arg accept_enterprise: Supported for backwards compatibility
+            with 7.x. If this param is used it must be set to true
        :arg categories: Comma-separated list of info categories. Can be
            any of: build, license, features
        """

elasticsearch/transport.py

Lines changed: 9 additions & 4 deletions

@@ -378,13 +378,18 @@ def perform_request(self, method, url, headers=None, params=None, body=None):
                     retry = True
 
                 if retry:
-                    # only mark as dead if we are retrying
-                    self.mark_dead(connection)
+                    try:
+                        # only mark as dead if we are retrying
+                        self.mark_dead(connection)
+                    except TransportError:
+                        # If sniffing on failure, it could fail too. Catch the
+                        # exception not to interrupt the retries.
+                        pass
                     # raise exception on last retry
                     if attempt == self.max_retries:
-                        raise
+                        raise e
                 else:
-                    raise
+                    raise e
 
             else:
                 # connection didn't fail, confirm it's live status

setup.py

Lines changed: 5 additions & 0 deletions

@@ -55,6 +55,11 @@
     author_email="honza.kral@gmail.com, nick@nicklang.com",
     maintainer="Seth Michael Larson",
     maintainer_email="seth.larson@elastic.co",
+    project_urls={
+        "Documentation": "https://elasticsearch-py.readthedocs.io",
+        "Source Code": "https://github.com/elastic/elasticsearch-py",
+        "Issue Tracker": "https://github.com/elastic/elasticsearch-py/issues",
+    },
     packages=find_packages(where=".", exclude=("test_elasticsearch*",)),
     package_data={"elasticsearch": ["py.typed"]},
     include_package_data=True,

test_elasticsearch/test_async/test_transport.py

Lines changed: 32 additions & 11 deletions

@@ -18,13 +18,14 @@
 
 from __future__ import unicode_literals
 import asyncio
+import json
 from mock import patch
 import pytest
 
 from elasticsearch import AsyncTransport
 from elasticsearch.connection import Connection
 from elasticsearch.connection_pool import DummyConnectionPool
-from elasticsearch.exceptions import ConnectionError
+from elasticsearch.exceptions import ConnectionError, TransportError
 
 
 pytestmark = pytest.mark.asyncio
@@ -273,16 +274,17 @@ async def test_failed_connection_will_be_marked_as_dead(self):
         assert 0 == len(t.connection_pool.connections)
 
     async def test_resurrected_connection_will_be_marked_as_live_on_success(self):
-        t = AsyncTransport([{}, {}], connection_class=DummyConnection)
-        await t._async_call()
-        con1 = t.connection_pool.get_connection()
-        con2 = t.connection_pool.get_connection()
-        t.connection_pool.mark_dead(con1)
-        t.connection_pool.mark_dead(con2)
-
-        await t.perform_request("GET", "/")
-        assert 1 == len(t.connection_pool.connections)
-        assert 1 == len(t.connection_pool.dead_count)
+        for method in ("GET", "HEAD"):
+            t = AsyncTransport([{}, {}], connection_class=DummyConnection)
+            await t._async_call()
+            con1 = t.connection_pool.get_connection()
+            con2 = t.connection_pool.get_connection()
+            t.connection_pool.mark_dead(con1)
+            t.connection_pool.mark_dead(con2)
+
+            await t.perform_request(method, "/")
+            assert 1 == len(t.connection_pool.connections)
+            assert 1 == len(t.connection_pool.dead_count)
 
     async def test_sniff_will_use_seed_connections(self):
         t = AsyncTransport([{"data": CLUSTER_NODES}], connection_class=DummyConnection)
@@ -368,6 +370,25 @@ async def test_sniff_on_fail_triggers_sniffing_on_fail(self):
         assert 1 == len(t.connection_pool.connections)
         assert "http://1.1.1.1:123" == t.get_connection().host
 
+    @patch("elasticsearch._async.transport.AsyncTransport.sniff_hosts")
+    async def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts):
+        sniff_hosts.side_effect = [TransportError("sniff failed")]
+        t = AsyncTransport(
+            [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}],
+            connection_class=DummyConnection,
+            sniff_on_connection_fail=True,
+            max_retries=3,
+            randomize_hosts=False,
+        )
+        await t._async_init()
+
+        conn_err, conn_data = t.connection_pool.connections
+        response = await t.perform_request("GET", "/")
+        assert json.loads(CLUSTER_NODES) == response
+        assert 1 == sniff_hosts.call_count
+        assert 1 == len(conn_err.calls)
+        assert 1 == len(conn_data.calls)
+
     async def test_sniff_after_n_seconds(self, event_loop):
         t = AsyncTransport(
             [{"data": CLUSTER_NODES}],
