
Merge branch 'main' into array-chunked-xcontent-builder
thecoop committed Oct 16, 2024
2 parents 475ead5 + 58b588c commit 355680e
Showing 1,662 changed files with 43,300 additions and 17,224 deletions. Only the first several file diffs are reproduced below.
2 changes: 1 addition & 1 deletion .buildkite/pipelines/pull-request/packaging-tests-unix.yml
@@ -5,7 +5,7 @@ steps:
steps:
- label: "{{matrix.image}} / docker / packaging-tests-unix"
key: "packaging-tests-unix-docker"
-        command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.docker
+        command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.docker-cloud-ess
timeout_in_minutes: 300
matrix:
setup:
1 change: 1 addition & 0 deletions .buildkite/pull-requests.json
@@ -8,6 +8,7 @@
"admin",
"write"
  ],
+ "allowed_list": ["elastic-renovate-prod[bot]"],
"set_commit_status": false,
"build_on_commit": true,
"build_on_comment": true,
@@ -30,10 +30,13 @@
import org.elasticsearch.compute.data.BooleanBlock;
import org.elasticsearch.compute.data.BooleanVector;
import org.elasticsearch.compute.data.BytesRefBlock;
+ import org.elasticsearch.compute.data.BytesRefVector;
import org.elasticsearch.compute.data.DoubleBlock;
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.IntBlock;
+ import org.elasticsearch.compute.data.IntVector;
import org.elasticsearch.compute.data.LongBlock;
+ import org.elasticsearch.compute.data.OrdinalBytesRefVector;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.AggregationOperator;
import org.elasticsearch.compute.operator.DriverContext;
@@ -78,7 +81,10 @@ public class AggregatorBenchmark {
private static final String DOUBLES = "doubles";
private static final String BOOLEANS = "booleans";
private static final String BYTES_REFS = "bytes_refs";
+ private static final String ORDINALS = "ordinals";
private static final String TWO_LONGS = "two_" + LONGS;
+ private static final String TWO_BYTES_REFS = "two_" + BYTES_REFS;
+ private static final String TWO_ORDINALS = "two_" + ORDINALS;
private static final String LONGS_AND_BYTES_REFS = LONGS + "_and_" + BYTES_REFS;
private static final String TWO_LONGS_AND_BYTES_REFS = "two_" + LONGS + "_and_" + BYTES_REFS;

@@ -119,7 +125,21 @@ public class AggregatorBenchmark {
}
}

- @Param({ NONE, LONGS, INTS, DOUBLES, BOOLEANS, BYTES_REFS, TWO_LONGS, LONGS_AND_BYTES_REFS, TWO_LONGS_AND_BYTES_REFS })
+ @Param(
+     {
+         NONE,
+         LONGS,
+         INTS,
+         DOUBLES,
+         BOOLEANS,
+         BYTES_REFS,
+         ORDINALS,
+         TWO_LONGS,
+         TWO_BYTES_REFS,
+         TWO_ORDINALS,
+         LONGS_AND_BYTES_REFS,
+         TWO_LONGS_AND_BYTES_REFS }
+ )
public String grouping;

@Param({ COUNT, COUNT_DISTINCT, MIN, MAX, SUM })
@@ -144,8 +164,12 @@ private static Operator operator(DriverContext driverContext, String grouping, S
case INTS -> List.of(new BlockHash.GroupSpec(0, ElementType.INT));
case DOUBLES -> List.of(new BlockHash.GroupSpec(0, ElementType.DOUBLE));
case BOOLEANS -> List.of(new BlockHash.GroupSpec(0, ElementType.BOOLEAN));
- case BYTES_REFS -> List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF));
+ case BYTES_REFS, ORDINALS -> List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF));
case TWO_LONGS -> List.of(new BlockHash.GroupSpec(0, ElementType.LONG), new BlockHash.GroupSpec(1, ElementType.LONG));
+ case TWO_BYTES_REFS, TWO_ORDINALS -> List.of(
+     new BlockHash.GroupSpec(0, ElementType.BYTES_REF),
+     new BlockHash.GroupSpec(1, ElementType.BYTES_REF)
+ );
case LONGS_AND_BYTES_REFS -> List.of(
new BlockHash.GroupSpec(0, ElementType.LONG),
new BlockHash.GroupSpec(1, ElementType.BYTES_REF)
@@ -218,6 +242,10 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
checkGroupingBlock(prefix, LONGS, page.getBlock(0));
checkGroupingBlock(prefix, LONGS, page.getBlock(1));
}
+ case TWO_BYTES_REFS, TWO_ORDINALS -> {
+     checkGroupingBlock(prefix, BYTES_REFS, page.getBlock(0));
+     checkGroupingBlock(prefix, BYTES_REFS, page.getBlock(1));
+ }
case LONGS_AND_BYTES_REFS -> {
checkGroupingBlock(prefix, LONGS, page.getBlock(0));
checkGroupingBlock(prefix, BYTES_REFS, page.getBlock(1));
@@ -379,7 +407,7 @@ private static void checkGroupingBlock(String prefix, String grouping, Block blo
throw new AssertionError(prefix + "bad group expected [true] but was [" + groups.getBoolean(1) + "]");
}
}
- case BYTES_REFS -> {
+ case BYTES_REFS, ORDINALS -> {
BytesRefBlock groups = (BytesRefBlock) block;
for (int g = 0; g < GROUPS; g++) {
if (false == groups.getBytesRef(g, new BytesRef()).equals(bytesGroup(g))) {
@@ -508,6 +536,8 @@ private static Block dataBlock(BlockFactory blockFactory, String blockType) {
private static List<Block> groupingBlocks(String grouping, String blockType) {
return switch (grouping) {
case TWO_LONGS -> List.of(groupingBlock(LONGS, blockType), groupingBlock(LONGS, blockType));
+ case TWO_BYTES_REFS -> List.of(groupingBlock(BYTES_REFS, blockType), groupingBlock(BYTES_REFS, blockType));
+ case TWO_ORDINALS -> List.of(groupingBlock(ORDINALS, blockType), groupingBlock(ORDINALS, blockType));
case LONGS_AND_BYTES_REFS -> List.of(groupingBlock(LONGS, blockType), groupingBlock(BYTES_REFS, blockType));
case TWO_LONGS_AND_BYTES_REFS -> List.of(
groupingBlock(LONGS, blockType),
@@ -570,6 +600,19 @@ private static Block groupingBlock(String grouping, String blockType) {
}
yield builder.build();
}
+ case ORDINALS -> {
+     IntVector.Builder ordinals = blockFactory.newIntVectorBuilder(BLOCK_LENGTH * valuesPerGroup);
+     for (int i = 0; i < BLOCK_LENGTH; i++) {
+         for (int v = 0; v < valuesPerGroup; v++) {
+             ordinals.appendInt(i % GROUPS);
+         }
+     }
+     BytesRefVector.Builder bytes = blockFactory.newBytesRefVectorBuilder(BLOCK_LENGTH * valuesPerGroup);
+     for (int i = 0; i < GROUPS; i++) {
+         bytes.appendBytesRef(bytesGroup(i));
+     }
+     yield new OrdinalBytesRefVector(ordinals.build(), bytes.build()).asBlock();
+ }
default -> throw new UnsupportedOperationException("unsupported grouping [" + grouping + "]");
};
}
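For readers unfamiliar with the dictionary-encoded ("ordinal") representation exercised by the new ORDINALS cases above: each row stores a small integer ordinal that indexes into a pool of distinct byte values, so per-row work operates on ints instead of full byte strings. The following standalone Java sketch illustrates the idea only; it does not use the actual OrdinalBytesRefVector API.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Toy dictionary encoding: each distinct value is stored once; rows hold int ordinals.
class OrdinalEncodedColumn {
    final int[] ordinals;          // one entry per row, an index into `dictionary`
    final List<String> dictionary; // distinct values, in first-seen order

    OrdinalEncodedColumn(String[] rows) {
        this.ordinals = new int[rows.length];
        this.dictionary = new ArrayList<>();
        Map<String, Integer> seen = new HashMap<>();
        for (int i = 0; i < rows.length; i++) {
            Integer ord = seen.get(rows[i]);
            if (ord == null) {
                // assign the next ordinal the first time a value is seen
                ord = dictionary.size();
                dictionary.add(rows[i]);
                seen.put(rows[i], ord);
            }
            ordinals[i] = ord;
        }
    }

    String valueAt(int row) {
        return dictionary.get(ordinals[row]); // O(1) indirection through the dictionary
    }
}

Grouping on ordinals means the hash only ever sees small integers and each distinct byte string is processed once, which is presumably the effect the ORDINALS and TWO_ORDINALS benchmark cases are meant to quantify against the plain BYTES_REFS cases.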
@@ -44,7 +44,7 @@ public void apply(Project project) {
gitInfo.disallowChanges();
gitInfo.finalizeValueOnRead();

- revision = gitInfo.map(info -> info.getRevision() == null ? info.getRevision() : "master");
+ revision = gitInfo.map(info -> info.getRevision() == null ? info.getRevision() : "main");
}

public Property<GitInfo> getGitInfo() {
@@ -21,6 +21,7 @@
public class LicensingPlugin implements Plugin<Project> {
static final String ELASTIC_LICENSE_URL_PREFIX = "https://raw.githubusercontent.com/elastic/elasticsearch/";
static final String ELASTIC_LICENSE_URL_POSTFIX = "/licenses/ELASTIC-LICENSE-2.0.txt";
+ static final String AGPL_ELASTIC_LICENSE_URL_POSTFIX = "/licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt";

private ProviderFactory providerFactory;

@@ -36,15 +37,18 @@ public void apply(Project project) {
isSnapshotVersion(project) ? revision.get() : "v" + project.getVersion()
);

- Provider<String> projectLicenseURL = licenseCommitProvider.map(licenseCommit -> ELASTIC_LICENSE_URL_PREFIX +
+ Provider<String> elasticLicenseURL = licenseCommitProvider.map(licenseCommit -> ELASTIC_LICENSE_URL_PREFIX +
      licenseCommit + ELASTIC_LICENSE_URL_POSTFIX);
+ Provider<String> agplLicenseURL = licenseCommitProvider.map(licenseCommit -> ELASTIC_LICENSE_URL_PREFIX +
+     licenseCommit + AGPL_ELASTIC_LICENSE_URL_POSTFIX);
// But stick the Elastic license url in project.ext so we can get it if we need to switch to it
- project.getExtensions().getExtraProperties().set("elasticLicenseUrl", projectLicenseURL);
+ project.getExtensions().getExtraProperties().set("elasticLicenseUrl", elasticLicenseURL);

MapProperty<String, String> licensesProperty = project.getObjects().mapProperty(String.class, String.class).convention(
    providerFactory.provider(() -> Map.of(
        "Server Side Public License, v 1", "https://www.mongodb.com/licensing/server-side-public-license",
-       "Elastic License 2.0", projectLicenseURL.get())
+       "Elastic License 2.0", elasticLicenseURL.get(),
+       "GNU Affero General Public License Version 3", agplLicenseURL.get())
)
);

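For context on the pattern in this hunk: Gradle Provider values compose lazily, so the license URLs are only computed if and when something queries them. A minimal self-contained Java sketch of the same shape follows — the class name, the hard-coded "v1.0" commit value, and the extra agplLicenseUrl property are illustrative assumptions, not the plugin's real wiring.

import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.provider.Provider;

// Sketch: derive both license URLs lazily from a single commit/version provider.
public class LicenseUrlsSketch implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        // Stand-in for the real git/version lookup; nothing runs until .get() is called.
        Provider<String> commit = project.getProviders().provider(() -> "v1.0");
        Provider<String> elasticUrl = commit.map(
            c -> "https://raw.githubusercontent.com/elastic/elasticsearch/" + c + "/licenses/ELASTIC-LICENSE-2.0.txt"
        );
        Provider<String> agplUrl = commit.map(
            c -> "https://raw.githubusercontent.com/elastic/elasticsearch/" + c + "/licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt"
        );
        project.getExtensions().getExtraProperties().set("elasticLicenseUrl", elasticUrl);
        project.getExtensions().getExtraProperties().set("agplLicenseUrl", agplUrl);
    }
}

Because the license map is declared as a convention over a provider, a build script can override it without the URL computation ever running.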
@@ -69,6 +69,11 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
<url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/ELASTIC-LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
+ <license>
+     <name>GNU Affero General Public License Version 3</name>
+     <url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt</url>
+     <distribution>repo</distribution>
+ </license>
<license>
<name>Server Side Public License, v 1</name>
<url>https://www.mongodb.com/licensing/server-side-public-license</url>
@@ -144,6 +149,11 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
<url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/ELASTIC-LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
+ <license>
+     <name>GNU Affero General Public License Version 3</name>
+     <url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt</url>
+     <distribution>repo</distribution>
+ </license>
<license>
<name>Server Side Public License, v 1</name>
<url>https://www.mongodb.com/licensing/server-side-public-license</url>
@@ -228,6 +238,11 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
<url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/ELASTIC-LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
+ <license>
+     <name>GNU Affero General Public License Version 3</name>
+     <url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt</url>
+     <distribution>repo</distribution>
+ </license>
<license>
<name>Server Side Public License, v 1</name>
<url>https://www.mongodb.com/licensing/server-side-public-license</url>
@@ -321,6 +336,11 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
<url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/ELASTIC-LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
+ <license>
+     <name>GNU Affero General Public License Version 3</name>
+     <url>https://raw.githubusercontent.com/elastic/elasticsearch/v1.0/licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt</url>
+     <distribution>repo</distribution>
+ </license>
<license>
<name>Server Side Public License, v 1</name>
<url>https://www.mongodb.com/licensing/server-side-public-license</url>
@@ -394,6 +414,11 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
<url>https://raw.githubusercontent.com/elastic/elasticsearch/v2.0/licenses/ELASTIC-LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
+ <license>
+     <name>GNU Affero General Public License Version 3</name>
+     <url>https://raw.githubusercontent.com/elastic/elasticsearch/v2.0/licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt</url>
+     <distribution>repo</distribution>
+ </license>
<license>
<name>Server Side Public License, v 1</name>
<url>https://www.mongodb.com/licensing/server-side-public-license</url>
@@ -24,22 +24,17 @@ public enum DockerBase {
// Base image with extras for Cloud
CLOUD("ubuntu:20.04", "-cloud", "apt-get"),

- // Based on CLOUD above, with more extras. We don't set a base image because
- // we programmatically extend from the Cloud image.
- CLOUD_ESS(null, "-cloud-ess", "apt-get"),

// Chainguard based wolfi image with latest jdk
// This is usually updated via renovatebot
// spotless:off
- WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:c16d3ad6cebf387e8dd2ad769f54320c4819fbbaa21e729fad087c7ae223b4d0",
+ WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:277ebb42c458ef39cb4028f9204f0b3d51d8cd628ea737a65696a1143c3e42fe",
"-wolfi",
"apk"
),
// spotless:on

// Based on WOLFI above, with more extras. We don't set a base image because
- // we programmatically extend from the Wolfi image.
- WOLFI_ESS(null, "-wolfi-ess", "apk");
+ // we programmatically extend from the wolfi image.
+ CLOUD_ESS(null, "-cloud-ess", "apk");

private final String image;
private final String suffix;
@@ -163,7 +163,13 @@ public void execute(BuildFinishedFlowAction.Parameters parameters) throws FileNo
// So, if you change this such that the artifact will have a slash/directory in it, you'll need to update the logic
// below as well
pb.directory(uploadFileDir);
- pb.start().waitFor();
+ try {
+     // we are very generous here, as the upload can take
+     // a long time depending on its size
+     pb.start().waitFor(30, java.util.concurrent.TimeUnit.MINUTES);
+ } catch (InterruptedException e) {
+     System.out.println("Failed to upload buildkite artifact " + e.getMessage());
+ }

System.out.println("Generating buildscan link for artifact...");

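One subtlety in the bounded wait added above: Process.waitFor(timeout, unit) returns false on timeout rather than throwing, so the try/catch only reacts to thread interruption, not to the upload overrunning the 30-minute budget. A minimal sketch of a variant that also handles the timeout case — illustrative only, not the build's actual code:

import java.io.IOException;
import java.util.concurrent.TimeUnit;

class BoundedUploadSketch {
    static void runWithTimeout(ProcessBuilder pb) throws IOException, InterruptedException {
        Process process = pb.start();
        // waitFor(timeout, unit) returns false if the process is still alive when time runs out
        if (process.waitFor(30, TimeUnit.MINUTES) == false) {
            process.destroyForcibly(); // avoid leaving a stalled uploader behind
            System.out.println("buildkite artifact upload timed out after 30 minutes");
        }
    }
}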
@@ -185,8 +185,8 @@ public void execute(Task t) {
});

if (OS.current().equals(OS.WINDOWS) && System.getProperty("tests.timeoutSuite") == null) {
- // override the suite timeout to 30 mins for windows, because it has the most inefficient filesystem known to man
- test.systemProperty("tests.timeoutSuite", "2400000!");
+ // override the suite timeout to 60 mins for windows, because it has the most inefficient filesystem known to man
+ test.systemProperty("tests.timeoutSuite", "3600000!");
}

/*
@@ -181,9 +181,6 @@ private static String distributionProjectName(ElasticsearchDistribution distribu
if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_WOLFI) {
return projectName + "wolfi-docker" + archString + "-export";
}
- if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_WOLFI_ESS) {
-     return projectName + "wolfi-ess-docker" + archString + "-export";
- }
return projectName + distribution.getType().getName();
}

@@ -22,7 +22,6 @@ public class InternalElasticsearchDistributionTypes {
public static ElasticsearchDistributionType DOCKER_CLOUD = new DockerCloudElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType();
- public static ElasticsearchDistributionType DOCKER_WOLFI_ESS = new DockerWolfiEssElasticsearchDistributionType();

public static List<ElasticsearchDistributionType> ALL_INTERNAL = List.of(
DEB,
@@ -32,7 +31,6 @@ public class InternalElasticsearchDistributionTypes {
DOCKER_IRONBANK,
DOCKER_CLOUD,
DOCKER_CLOUD_ESS,
-     DOCKER_WOLFI,
-     DOCKER_WOLFI_ESS
+     DOCKER_WOLFI
);
}
@@ -54,7 +54,6 @@
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_IRONBANK;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI;
- import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI_ESS;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.RPM;

/**
@@ -153,7 +152,6 @@ private static Map<ElasticsearchDistributionType, TaskProvider<?>> lifecycleTask
lifecyleTasks.put(DOCKER_CLOUD, project.getTasks().register(taskPrefix + ".docker-cloud"));
lifecyleTasks.put(DOCKER_CLOUD_ESS, project.getTasks().register(taskPrefix + ".docker-cloud-ess"));
lifecyleTasks.put(DOCKER_WOLFI, project.getTasks().register(taskPrefix + ".docker-wolfi"));
- lifecyleTasks.put(DOCKER_WOLFI_ESS, project.getTasks().register(taskPrefix + ".docker-wolfi-ess"));
lifecyleTasks.put(ARCHIVE, project.getTasks().register(taskPrefix + ".archives"));
lifecyleTasks.put(DEB, project.getTasks().register(taskPrefix + ".packages"));
lifecyleTasks.put(RPM, lifecyleTasks.get(DEB));
9 changes: 1 addition & 8 deletions distribution/docker/README.md
@@ -7,7 +7,7 @@ the [DockerBase] enum.
* UBI - the same as the default image, but based upon [RedHat's UBI
images][ubi], specifically their minimal flavour.
* Wolfi - the same as the default image, but based upon [Wolfi](https://github.com/wolfi-dev)
- * Wolfi ESS - this directly extends the Wolfi image, and adds all ES plugins
+ * Cloud ESS - this directly extends the Wolfi image, and adds all ES plugins
that the ES build generates in an archive directory. It also sets an
environment variable that points at this directory. This allows plugins to
be installed from the archive instead of the internet, speeding up
@@ -23,20 +23,13 @@ the [DockerBase] enum.
software (FOSS) and Commercial off-the-shelf (COTS). In practice, this is
another UBI build, this time on the regular UBI image, with extra
hardening. See below for more details.

* Cloud - this is mostly the same as the default image, with some notable differences:
* `filebeat` and `metricbeat` are included
* `wget` is included
* The `ENTRYPOINT` is just `/bin/tini`, and the `CMD` is
`/app/elasticsearch.sh`. In normal use this file would be bind-mounted
in, but the image ships a stub version of this file so that the image
can still be tested.
- * Cloud ESS - this directly extends the Cloud image, and adds all ES plugins
-   that the ES build generates in an archive directory. It also sets an
-   environment variable that points at this directory. This allows plugins to
-   be installed from the archive instead of the internet, speeding up
-   deployment times.

The long-term goal is for both Cloud images to be retired in favour of the
default image.