Merge branch 'master' into java-home-sweet-java-home
* master:
  Enable skipping fetching latest for BWC builds (elastic#29497)
  Add remote cluster client (elastic#29495)
  Ensure flush happens on shard idle
  Adds SpanGapQueryBuilder in the query DSL (elastic#28636)
  Control max size and count of warning headers (elastic#28427)
  Make index APIs work without types. (elastic#29479)
  Deprecate filtering on `_type`. (elastic#29468)
  Fix auto-generated ID example format (elastic#29461)
  Fix typo in max number of threads check docs (elastic#29469)
  Add primary term to translog header (elastic#29227)
  Add a helper method to get a random java.util.TimeZone (elastic#29487)
  Move TimeValue into elasticsearch-core project (elastic#29486)
jasontedor committed Apr 13, 2018
2 parents d02143a + 03ce3dd commit 062d1ae
Showing 95 changed files with 1,682 additions and 664 deletions.
7 changes: 7 additions & 0 deletions TESTING.asciidoc
@@ -498,6 +498,13 @@ will contain your change.
. Push both branches to your remote repository.
. Run the tests with `./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x`.

+== Skip fetching latest
+
+For some BWC testing scenarios, you want to use the local clone of the
+repository without fetching latest. For these use cases, you can set the system
+property `tests.bwc.git_fetch_latest` to `false` and the BWC builds will skip
+fetching the latest from the remote.
+
== Test coverage analysis

Generating test coverage reports for Elasticsearch is currently not possible through Gradle.
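
For example, a BWC run that reuses an existing local clone without fetching would add the new property to the usual invocation: `./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x -Dtests.bwc.git_fetch_latest=false` (a sketch reusing the placeholder remote and refspec from the steps above).
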
12 changes: 11 additions & 1 deletion distribution/bwc/build.gradle
@@ -54,6 +54,16 @@ subprojects {

final String remote = System.getProperty("tests.bwc.remote", "elastic")

+final boolean gitFetchLatest
+final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true")
+if ("true".equals(gitFetchLatestProperty)) {
+gitFetchLatest = true
+} else if ("false".equals(gitFetchLatestProperty)) {
+gitFetchLatest = false
+} else {
+throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]")
+}
+
task createClone(type: LoggedExec) {
onlyIf { checkoutDir.exists() == false }
commandLine = ['git', 'clone', rootDir, checkoutDir]
@@ -83,7 +93,7 @@ subprojects {
}

task fetchLatest(type: LoggedExec) {
-onlyIf { project.gradle.startParameter.isOffline() == false }
+onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest }
dependsOn addRemote
workingDir = checkoutDir
commandLine = ['git', 'fetch', '--all']
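
Note the fail-fast parsing of the new property above: any value other than `true` or `false` (a typo, say) aborts the build with a `GradleException` rather than silently falling back to fetching.
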
12 changes: 6 additions & 6 deletions docs/painless/painless-getting-started.asciidoc
@@ -239,7 +239,7 @@ their last name:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -260,7 +260,7 @@ names start with a consonant and end with a vowel:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -281,7 +281,7 @@ remove all of the vowels in all of their last names:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -297,7 +297,7 @@ method so it supports `$1` and `\1` for replacements:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -319,7 +319,7 @@ This will make all of the vowels in the hockey player's last names upper case:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -337,7 +337,7 @@ last names upper case:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -38,7 +38,7 @@ Example:

[source,js]
--------------------------------------------------
-GET news/article/_search
+GET news/_search
{
"query" : {
"match" : {"content" : "Bird flu"}
@@ -153,7 +153,7 @@ We can drill down into examples of these documents to see why pozmantier is conn

[source,js]
--------------------------------------------------
-GET news/article/_search
+GET news/_search
{
"query": {
"simple_query_string": {
@@ -221,7 +221,7 @@ with the `filter_duplicate_text` setting turned on:

[source,js]
--------------------------------------------------
-GET news/article/_search
+GET news/_search
{
"query": {
"match": {
@@ -424,7 +424,7 @@ context:

[source,js]
--------------------------------------------------
-GET news/article/_search
+GET news/_search
{
"query" : {
"match" : {
@@ -463,7 +463,7 @@ will be analyzed using the `source_fields` parameter:

[source,js]
--------------------------------------------------
-GET news/article/_search
+GET news/_search
{
"query" : {
"match" : {
@@ -217,7 +217,7 @@ had a value.

[source,js]
--------------------------------------------------
-GET latency/data/_search
+GET latency/_search
{
"size": 0,
"aggs" : {
8 changes: 4 additions & 4 deletions docs/reference/docs/delete-by-query.asciidoc
@@ -75,7 +75,7 @@ Back to the API format, this will delete tweets from the `twitter` index:

[source,js]
--------------------------------------------------
-POST twitter/_doc/_delete_by_query?conflicts=proceed
+POST twitter/_delete_by_query?conflicts=proceed
{
"query": {
"match_all": {}
@@ -85,12 +85,12 @@ POST twitter/_doc/_delete_by_query?conflicts=proceed
// CONSOLE
// TEST[setup:twitter]

-It's also possible to delete documents of multiple indexes and multiple
-types at once, just like the search API:
+It's also possible to delete documents of multiple indexes at once, just like
+the search API:

[source,js]
--------------------------------------------------
-POST twitter,blog/_docs,post/_delete_by_query
+POST twitter,blog/_delete_by_query
{
"query": {
"match_all": {}
4 changes: 2 additions & 2 deletions docs/reference/docs/index_.asciidoc
@@ -229,14 +229,14 @@ The result of the above index operation is:
},
"_index" : "twitter",
"_type" : "_doc",
"_id" : "6a8ca01c-7896-48e9-81cc-9f70661fcb32",
"_id" : "W0tpsmIBdwcYyG50zbta",
"_version" : 1,
"_seq_no" : 0,
"_primary_term" : 1,
"result": "created"
}
--------------------------------------------------
-// TESTRESPONSE[s/6a8ca01c-7896-48e9-81cc-9f70661fcb32/$body._id/ s/"successful" : 2/"successful" : 1/]
+// TESTRESPONSE[s/W0tpsmIBdwcYyG50zbta/$body._id/ s/"successful" : 2/"successful" : 1/]

[float]
[[index-routing]]
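
An auto-generated ID such as the `W0tpsmIBdwcYyG50zbta` value above is what Elasticsearch assigns when a document is indexed without an explicit ID, i.e. with a `POST` instead of a `PUT`. A minimal sketch (index and field names follow the other `twitter` examples in these docs):

[source,js]
--------------------------------------------------
POST twitter/_doc/
{
    "user" : "kimchy",
    "post_date" : "2009-11-15T14:12:12",
    "message" : "trying out Elasticsearch"
}
--------------------------------------------------
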
8 changes: 4 additions & 4 deletions docs/reference/docs/update-by-query.asciidoc
@@ -67,7 +67,7 @@ Back to the API format, this will update tweets from the `twitter` index:

[source,js]
--------------------------------------------------
-POST twitter/_doc/_update_by_query?conflicts=proceed
+POST twitter/_update_by_query?conflicts=proceed
--------------------------------------------------
// CONSOLE
// TEST[setup:twitter]
@@ -145,12 +145,12 @@ This API doesn't allow you to move the documents it touches, just modify their
source. This is intentional! We've made no provisions for removing the document
from its original location.

-It's also possible to do this whole thing on multiple indexes and multiple
-types at once, just like the search API:
+It's also possible to do this whole thing on multiple indexes at once, just
+like the search API:

[source,js]
--------------------------------------------------
-POST twitter,blog/_doc,post/_update_by_query
+POST twitter,blog/_update_by_query
--------------------------------------------------
// CONSOLE
// TEST[s/^/PUT twitter\nPUT blog\n/]
2 changes: 1 addition & 1 deletion docs/reference/modules/cluster/misc.asciidoc
@@ -82,4 +82,4 @@ Enable or disable allocation for persistent tasks:
This setting does not affect the persistent tasks that are already being executed.
Only newly created persistent tasks, or tasks that must be reassigned (after a node
left the cluster, for example), are impacted by this setting.
---
+--
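
As a hedged illustration of the section above (the setting key `cluster.persistent_tasks.allocation.enable` is assumed from the surrounding file, which is mostly elided in this diff), disabling allocation for newly created persistent tasks might look like:

[source,js]
--------------------------------------------------
PUT _cluster/settings
{
  "persistent": {
    "cluster.persistent_tasks.allocation.enable": "none"
  }
}
--------------------------------------------------
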
8 changes: 7 additions & 1 deletion docs/reference/modules/http.asciidoc
@@ -20,7 +20,7 @@ http://en.wikipedia.org/wiki/Chunked_transfer_encoding[HTTP chunking].

The settings in the table below can be configured for HTTP. Note that none of
them are dynamically updatable so for them to take effect they should be set in
-`elasticsearch.yml`.
+the Elasticsearch <<settings, configuration file>>.

[cols="<,<",options="header",]
|=======================================================================
@@ -100,6 +100,12 @@ simple message will be returned. Defaults to `true`

|`http.pipelining.max_events` |The maximum number of events to be queued up in memory before a HTTP connection is closed, defaults to `10000`.

+|`http.max_warning_header_count` |The maximum number of warning headers in
+client HTTP responses, defaults to unbounded.
+
+|`http.max_warning_header_size` |The maximum total size of warning headers in
+client HTTP responses, defaults to unbounded.
+
|=======================================================================

It also uses the common
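
Because none of these HTTP settings are dynamically updatable, the two new warning-header limits belong in the configuration file rather than the cluster settings API; for instance (illustrative values, not defaults), `http.max_warning_header_count: 64` and `http.max_warning_header_size: 7kb` in `elasticsearch.yml`.
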
12 changes: 1 addition & 11 deletions docs/reference/search/search.asciidoc
@@ -12,8 +12,7 @@ that match the query. The query can either be provided using a simple
All search APIs can be applied across multiple types within an index, and
across multiple indices with support for the
<<multi-index,multi index syntax>>. For
-example, we can search on all documents across all types within the
-twitter index:
+example, we can search on all documents within the twitter index:

[source,js]
--------------------------------------------------
@@ -22,15 +21,6 @@ GET /twitter/_search?q=user:kimchy
// CONSOLE
// TEST[setup:twitter]

-We can also search within specific types:
-
-[source,js]
---------------------------------------------------
-GET /twitter/tweet,user/_search?q=user:kimchy
---------------------------------------------------
-// CONSOLE
-// TEST[setup:twitter]
-
We can also search all tweets with a certain tag across several indices
(for example, when each user has his own index):

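
The concrete multi-index example is elided by the diff view above; a sketch of such a search, with hypothetical per-user index names, would be:

[source,js]
--------------------------------------------------
GET /kimchy,elasticsearch/_search?q=tag:wow
--------------------------------------------------
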
2 changes: 1 addition & 1 deletion docs/reference/setup/bootstrap-checks.asciidoc
@@ -114,7 +114,7 @@ that the Elasticsearch process has the rights to create enough threads
under normal use. This check is enforced only on Linux. If you are on
Linux, to pass the maximum number of threads check, you must configure
your system to allow the Elasticsearch process the ability to create at
-least 2048 threads. This can be done via `/etc/security/limits.conf`
+least 4096 threads. This can be done via `/etc/security/limits.conf`
using the `nproc` setting (note that you might have to increase the
limits for the `root` user too).

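
For instance, assuming Elasticsearch runs as an `elasticsearch` user, the corresponding `/etc/security/limits.conf` entry would be a line such as `elasticsearch - nproc 4096`.
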
@@ -19,15 +19,12 @@

package org.elasticsearch.common.unit;

-import org.elasticsearch.common.xcontent.ToXContentFragment;
-import org.elasticsearch.common.xcontent.XContentBuilder;

-import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import java.util.concurrent.TimeUnit;

-public class TimeValue implements Comparable<TimeValue>, ToXContentFragment {
+public class TimeValue implements Comparable<TimeValue> {

/** How many nano-seconds in one milli-second */
public static final long NSEC_PER_MSEC = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS);
@@ -352,9 +349,4 @@ public int compareTo(TimeValue timeValue) {
double otherValue = ((double) timeValue.duration) * timeValue.timeUnit.toNanos(1);
return Double.compare(thisValue, otherValue);
}

-@Override
-public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-return builder.value(toString());
-}
}
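
With `ToXContentFragment` removed, any caller that previously rendered a `TimeValue` through this `toXContent` method presumably now emits the string form itself, e.g. `builder.value(timeValue.toString())` (an inference from the deleted body; the call sites are not shown in this diff).
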
@@ -19,15 +19,10 @@

package org.elasticsearch.common.unit;

-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.test.ESTestCase;

-import java.io.IOException;
import java.util.concurrent.TimeUnit;

-import static org.elasticsearch.common.unit.TimeValue.timeValueNanos;
-import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.containsString;
@@ -154,31 +149,6 @@ private String randomTimeUnit() {
return randomFrom("nanos", "micros", "ms", "s", "m", "h", "d");
}

-private void assertEqualityAfterSerialize(TimeValue value, int expectedSize) throws IOException {
-BytesStreamOutput out = new BytesStreamOutput();
-out.writeTimeValue(value);
-assertEquals(expectedSize, out.size());
-
-StreamInput in = out.bytes().streamInput();
-TimeValue inValue = in.readTimeValue();
-
-assertThat(inValue, equalTo(value));
-assertThat(inValue.duration(), equalTo(value.duration()));
-assertThat(inValue.timeUnit(), equalTo(value.timeUnit()));
-}
-
-public void testSerialize() throws Exception {
-assertEqualityAfterSerialize(new TimeValue(100, TimeUnit.DAYS), 3);
-assertEqualityAfterSerialize(timeValueNanos(-1), 2);
-assertEqualityAfterSerialize(timeValueNanos(1), 2);
-assertEqualityAfterSerialize(timeValueSeconds(30), 2);
-
-final TimeValue timeValue = new TimeValue(randomIntBetween(0, 1024), randomFrom(TimeUnit.values()));
-BytesStreamOutput out = new BytesStreamOutput();
-out.writeZLong(timeValue.duration());
-assertEqualityAfterSerialize(timeValue, 1 + out.bytes().length());
-}
-
public void testFailOnUnknownUnits() {
try {
TimeValue.parseTimeValue("23tw", null, "test");
@@ -35,14 +35,12 @@
- do:
search:
index: empty_bucket_idx
-type: test

- match: {hits.total: 2}

- do:
search:
index: empty_bucket_idx
-type: test
body: {"aggs": {"histo": {"histogram": {"field": "val1", "interval": 1, "min_doc_count": 0}, "aggs": { "mfs" : { "matrix_stats": {"fields": ["value", "val1"]} } } } } }

- match: {hits.total: 2}
