Commit

Add version for 5.0.0
This commit adds the version constant for 5.0.0.

Relates #21244
jasontedor committed Nov 1, 2016
1 parent a612e59 commit 7751049
Showing 13 changed files with 94 additions and 26 deletions.
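Downstream serialization code gates wire-format changes on constants like the one added here. A minimal sketch of that pattern (the field name is hypothetical and not part of this commit; it only assumes the existing Version.onOrAfter check and the StreamOutput API used throughout the diff below):

    // Only emit the field to nodes that already speak the 5.0.0 wire format.
    if (out.getVersion().onOrAfter(Version.V_5_0_0)) {
        out.writeOptionalString(hypotheticalNewField);
    }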
4 changes: 4 additions & 0 deletions core/src/main/java/org/elasticsearch/Version.java
@@ -89,6 +89,8 @@ public class Version {
public static final Version V_5_0_0_beta1 = new Version(V_5_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
public static final int V_5_0_0_rc1_ID = 5000051;
public static final Version V_5_0_0_rc1 = new Version(V_5_0_0_rc1_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
public static final int V_5_0_0_ID = 5000099;
public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
public static final int V_6_0_0_alpha1_ID = 6000001;
public static final Version V_6_0_0_alpha1 = new Version(V_6_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_3_0);
public static final Version CURRENT = V_6_0_0_alpha1;
@@ -115,6 +117,8 @@ public static Version fromId(int id) {
switch (id) {
case V_6_0_0_alpha1_ID:
return V_6_0_0_alpha1;
case V_5_0_0_ID:
return V_5_0_0;
case V_5_0_0_rc1_ID:
return V_5_0_0_rc1;
case V_5_0_0_beta1_ID:
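The new ID 5000099 follows the encoding already visible in the neighbouring constants: two decimal digits each for major, minor, and revision, plus a trailing build number where 99 marks the GA release and pre-releases sit lower (for example 51 for rc1 and 01 for alpha1). A standalone sketch of that decoding, inferred from the constants above rather than copied from Version.java:

    int id = 5000099;
    int major    = (id / 1000000) % 100; // 5
    int minor    = (id / 10000)   % 100; // 0
    int revision = (id / 100)     % 100; // 0
    int build    = id % 100;             // 99 => GA release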
1 change: 0 additions & 1 deletion core/src/test/java/org/elasticsearch/VersionTests.java
@@ -276,7 +276,6 @@ public void testUnknownVersions() {
assertUnknownVersion(OsStats.V_5_1_0); // once we released 5.1.0 and it's added to Version.java we need to remove this constant
assertUnknownVersion(SimpleQueryStringBuilder.V_5_1_0_UNRELEASED);
// once we released 5.0.0 and it's added to Version.java we need to remove this constant
assertUnknownVersion(ShardValidateQueryRequestTests.V_5_0_0);
}

public static void assertUnknownVersion(Version version) {
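The deleted assertion is a direct consequence of the new switch case: Version.fromId(5000099) now resolves to the shared V_5_0_0 constant instead of an ad-hoc Version instance, so 5.0.0 is no longer "unknown". A one-line illustration, relying only on the case added in this commit:

    // fromId now returns the released constant, so assertUnknownVersion(V_5_0_0) would fail.
    assert Version.fromId(5000099) == Version.V_5_0_0;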
ExplainRequestTests.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.action;

import org.elasticsearch.Version;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -92,7 +93,7 @@ public void testSerialize50Request() throws IOException {
.decode("AAABBWluZGV4BHR5cGUCaWQBDHNvbWVfcm91dGluZwEOdGhlX3ByZWZlcmVuY2UEdGVybT" +
"+AAAAABWZpZWxkFQV2YWx1ZQIGYWxpYXMwBmFsaWFzMQECBmZpZWxkMQZmaWVsZDIBAQEIZmllbGQxLioBCGZpZWxkMi4qAA"));
try (StreamInput in = new NamedWriteableAwareStreamInput(requestBytes.streamInput(), namedWriteableRegistry)) {
in.setVersion(ShardValidateQueryRequestTests.V_5_0_0);
in.setVersion(Version.V_5_0_0);
ExplainRequest readRequest = new ExplainRequest();
readRequest.readFrom(in);
assertEquals(0, in.available());
@@ -104,7 +105,7 @@ public void testSerialize50Request() throws IOException {
assertEquals(request.routing(), readRequest.routing());
assertEquals(request.fetchSourceContext(), readRequest.fetchSourceContext());
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(ShardValidateQueryRequestTests.V_5_0_0);
output.setVersion(Version.V_5_0_0);
readRequest.writeTo(output);
assertEquals(output.bytes().toBytesRef(), requestBytes.toBytesRef());
}
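This test, like the others below, replays a canned 5.0.0 request and pins both stream ends to the 5.0.0 wire version. Reduced to a sketch, the round-trip pattern looks like this (assuming a populated request and the test's namedWriteableRegistry):

    BytesStreamOutput out = new BytesStreamOutput();
    out.setVersion(Version.V_5_0_0);                  // write exactly as a 5.0.0 node would
    request.writeTo(out);
    try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), namedWriteableRegistry)) {
        in.setVersion(Version.V_5_0_0);               // read back with the same wire version
        ExplainRequest roundTripped = new ExplainRequest();
        roundTripped.readFrom(in);
        // roundTripped should now mirror the original request field for field
    }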
ShardValidateQueryRequestTests.java
@@ -42,7 +42,6 @@
import java.util.List;

public class ShardValidateQueryRequestTests extends ESTestCase {
public static final Version V_5_0_0 = Version.fromId(5000099);

protected NamedWriteableRegistry namedWriteableRegistry;
protected SearchRequestParsers searchRequestParsers;
@@ -94,7 +93,7 @@ public void testSerialize50Request() throws IOException {
// this is a base64 encoded request generated with the same input
.decode("AAVpbmRleAZmb29iYXIBAQdpbmRpY2VzBAR0ZXJtP4AAAAAFZmllbGQVBXZhbHVlAgV0eXBlMQV0eXBlMgIGYWxpYXMwBmFsaWFzMQABAA"));
try (StreamInput in = new NamedWriteableAwareStreamInput(requestBytes.streamInput(), namedWriteableRegistry)) {
in.setVersion(V_5_0_0);
in.setVersion(Version.V_5_0_0);
ShardValidateQueryRequest readRequest = new ShardValidateQueryRequest();
readRequest.readFrom(in);
assertEquals(0, in.available());
@@ -106,7 +105,7 @@
assertEquals(request.rewrite(), readRequest.rewrite());
assertEquals(request.shardId(), readRequest.shardId());
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(V_5_0_0);
output.setVersion(Version.V_5_0_0);
readRequest.writeTo(output);
assertEquals(output.bytes().toBytesRef(), requestBytes.toBytesRef());
}
OldIndexBackwardsCompatibilityIT.java
@@ -82,6 +82,7 @@
import java.util.TreeSet;

import static org.elasticsearch.test.OldIndexUtils.assertUpgradeWorks;
import static org.elasticsearch.test.OldIndexUtils.getIndexDir;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;

@@ -445,8 +446,15 @@ private Path getNodeDir(String indexFile) throws IOException {
throw new IllegalStateException("Backwards index must contain exactly one cluster");
}

// the bwc scripts packs the indices under this path
return list[0].resolve("nodes/0/");
int zipIndex = indexFile.indexOf(".zip");
final Version version = Version.fromString(indexFile.substring("index-".length(), zipIndex));
if (version.before(Version.V_5_0_0_alpha1)) {
// the bwc scripts packs the indices under this path
return list[0].resolve("nodes/0/");
} else {
// after 5.0.0, data folders do not include the cluster name
return list[0].resolve("0");
}
}

public void testOldClusterStates() throws Exception {
@@ -481,9 +489,19 @@ public IndexMetaData fromXContent(XContentParser parser) throws IOException {
String indexName = indexFile.replace(".zip", "").toLowerCase(Locale.ROOT).replace("unsupported-", "index-");
Path nodeDir = getNodeDir(indexFile);
logger.info("Parsing cluster state files from index [{}]", indexName);
assertNotNull(globalFormat.loadLatestState(logger, nodeDir)); // no exception
Path indexDir = nodeDir.resolve("indices").resolve(indexName);
assertNotNull(indexFormat.loadLatestState(logger, indexDir)); // no exception
final MetaData metaData = globalFormat.loadLatestState(logger, nodeDir);
assertNotNull(metaData);

final Version version = Version.fromString(indexName.substring("index-".length()));
final Path dataDir;
if (version.before(Version.V_5_0_0_alpha1)) {
dataDir = nodeDir.getParent().getParent();
} else {
dataDir = nodeDir.getParent();
}
final Path indexDir = getIndexDir(logger, indexName, indexFile, dataDir);
assertNotNull(indexFormat.loadLatestState(logger, indexDir));
}
}

}
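The version check added to getNodeDir (and the matching logic in testOldClusterStates) reflects the 5.0 change in on-disk layout: old bwc zips nest node data under nodes/0 inside the cluster directory, while 5.0+ data paths drop that level and name index folders by UUID. A sketch of the distinction, derived from the code above (clusterDir stands in for list[0]):

    // pre-5.0 zip: <cluster-name>/nodes/0/indices/<index-name>/...
    // 5.0+   zip: <cluster-name>/0/indices/<index-uuid>/...
    Path nodeDir = version.before(Version.V_5_0_0_alpha1)
            ? clusterDir.resolve("nodes/0")
            : clusterDir.resolve("0");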
RepositoryUpgradabilityIT.java
@@ -19,6 +19,7 @@

package org.elasticsearch.bwcompat;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.FileTestUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.set.Sets;
@@ -70,7 +71,12 @@ public void testRepositoryWorksWithCrossVersions() throws Exception {
final Set<SnapshotInfo> snapshotInfos = Sets.newHashSet(getSnapshots(repoName));
assertThat(snapshotInfos.size(), equalTo(1));
SnapshotInfo originalSnapshot = snapshotInfos.iterator().next();
assertThat(originalSnapshot.snapshotId(), equalTo(new SnapshotId("test_1", "test_1")));
if (Version.fromString(version).before(Version.V_5_0_0_alpha1)) {
assertThat(originalSnapshot.snapshotId(), equalTo(new SnapshotId("test_1", "test_1")));
} else {
assertThat(originalSnapshot.snapshotId().getName(), equalTo("test_1"));
assertNotNull(originalSnapshot.snapshotId().getUUID()); // it's a random UUID now
}
assertThat(Sets.newHashSet(originalSnapshot.indices()), equalTo(indices));

logger.info("--> restore the original snapshot");
IndexFolderUpgraderTests.java
@@ -211,11 +211,11 @@ public void testUpgradeRealIndex() throws IOException, URISyntaxException {
throw new IllegalStateException("Backwards index must contain exactly one cluster but was " + list.length);
}
// the bwc scripts packs the indices under this path
Path src = list[0].resolve("nodes/0/indices/" + indexName);
Path src = OldIndexUtils.getIndexDir(logger, indexName, path.getFileName().toString(), list[0]);
assertTrue("[" + path + "] missing index dir: " + src.toString(), Files.exists(src));
final Path indicesPath = randomFrom(nodeEnvironment.nodePaths()).indicesPath;
logger.info("--> injecting index [{}] into [{}]", indexName, indicesPath);
OldIndexUtils.copyIndex(logger, src, indexName, indicesPath);
OldIndexUtils.copyIndex(logger, src, src.getFileName().toString(), indicesPath);
IndexFolderUpgrader.upgradeIndicesIfNeeded(Settings.EMPTY, nodeEnvironment);

// ensure old index folder is deleted
InternalEngineTests.java
@@ -1856,6 +1856,7 @@ private Mapping dynamicUpdate() {
return new Mapping(Version.CURRENT, root, new MetadataFieldMapper[0], emptyMap());
}

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/21147")
public void testUpgradeOldIndex() throws IOException {
List<Path> indexes = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(getBwcIndicesPath(), "index-*.zip")) {
ShardSearchTransportRequestTests.java
@@ -190,7 +190,7 @@ public void testSerialize50Request() throws IOException {
"ZXJtP4AAAAANbUtDSnpHU3lidm5KUBUMaVpqeG9vcm5QSFlvAAEBLGdtcWxuRWpWTXdvTlhMSHh0RWlFdHBnbEF1cUNmVmhoUVlwRFZxVllnWWV1A2ZvbwEA" +
"AQhwYWlubGVzc/8AALk4AAAAAAABAAAAAAAAAwpKU09PU0ZmWnhFClVqTGxMa2p3V2gKdUJwZ3R3dXFER5Hg97uT7MOmPgEADw"));
try (StreamInput in = new NamedWriteableAwareStreamInput(requestBytes.streamInput(), namedWriteableRegistry)) {
in.setVersion(ShardValidateQueryRequestTests.V_5_0_0);
in.setVersion(Version.V_5_0_0);
ShardSearchTransportRequest readRequest = new ShardSearchTransportRequest();
readRequest.readFrom(in);
assertEquals(0, in.available());
Expand All @@ -214,7 +214,7 @@ public void testSerialize50Request() throws IOException {
.should(QueryBuilders.termQuery("foo", "bar2"))
);
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(ShardValidateQueryRequestTests.V_5_0_0);
output.setVersion(Version.V_5_0_0);
readRequest.writeTo(output);
assertEquals(output.bytes().toBytesRef(), requestBytes.toBytesRef());
}
Binary file not shown.
Binary file not shown.
15 changes: 13 additions & 2 deletions dev-tools/create_bwc_index.py
@@ -265,12 +265,20 @@ def generate_index(client, version, index_name):
mappings['doc'] = {'properties' : {}}
supports_dots_in_field_names = parse_version(version) >= parse_version("2.4.0")
if supports_dots_in_field_names:
mappings["doc"]['properties'].update({

if parse_version(version) < parse_version("5.0.0-alpha1"):
mappings["doc"]['properties'].update({
'field.with.dots': {
'type': 'string',
'boost': 4
}
})
else:
mappings["doc"]['properties'].update({
'field.with.dots': {
'type': 'text'
}
})

if parse_version(version) < parse_version("5.0.0-alpha1"):
mappings['norms'] = {
@@ -339,7 +347,10 @@ def generate_index(client, version, index_name):
if warmers:
body['warmers'] = warmers
client.indices.create(index=index_name, body=body)
health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0)
if parse_version(version) < parse_version("5.0.0-alpha1"):
health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0)
else:
health = client.cluster.health(wait_for_status='green', wait_for_no_relocating_shards=True)
assert health['timed_out'] == False, 'cluster health timed out %s' % health

num_docs = random.randint(2000, 3000)
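The Python tweak tracks the 5.0 rename of the cluster-health parameter from wait_for_relocating_shards=0 to wait_for_no_relocating_shards=true. For reference, roughly the same distinction on the Java client side, as an assumption-laden sketch (waitForNoRelocatingShards is assumed to be the 5.x replacement for waitForRelocatingShards(0); none of this is touched by the commit):

    ClusterHealthRequest health = new ClusterHealthRequest()
            .waitForGreenStatus()
            .waitForNoRelocatingShards(true); // pre-5.0 callers used waitForRelocatingShards(0)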
OldIndexUtils.java
@@ -30,6 +30,7 @@
import org.elasticsearch.action.admin.indices.upgrade.get.IndexUpgradeStatus;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.Loggers;
@@ -56,7 +57,10 @@
import static junit.framework.TestCase.assertTrue;
import static org.elasticsearch.test.ESTestCase.randomInt;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;


public class OldIndexUtils {
@@ -103,10 +107,35 @@ public static void loadIndex(String indexName, String indexFile, Path unzipDir,
throw new IllegalStateException("Backwards index must contain exactly one cluster");
}

// the bwc scripts packs the indices under this path
Path src = list[0].resolve("nodes/0/indices/" + indexName);
assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src));
copyIndex(logger, src, indexName, paths);
final Path src = getIndexDir(logger, indexName, indexFile, list[0]);
copyIndex(logger, src, src.getFileName().toString(), paths);
}

public static Path getIndexDir(
final Logger logger,
final String indexName,
final String indexFile,
final Path dataDir) throws IOException {
final Version version = Version.fromString(indexName.substring("index-".length()));
if (version.before(Version.V_5_0_0_alpha1)) {
// the bwc scripts packs the indices under this path
Path src = dataDir.resolve("nodes/0/indices/" + indexName);
assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src));
return src;
} else {
final List<Path> indexFolders = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(dataDir.resolve("0/indices"))) {
for (final Path path : stream) {
indexFolders.add(path);
}
}
assertThat(indexFolders.size(), equalTo(1));
final IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, indexFolders.get(0));
assertNotNull(indexMetaData);
assertThat(indexFolders.get(0).getFileName().toString(), equalTo(indexMetaData.getIndexUUID()));
assertThat(indexMetaData.getCreationVersion(), equalTo(version));
return indexFolders.get(0);
}
}

public static void assertNotUpgraded(Client client, String... index) throws Exception {
@@ -128,10 +157,10 @@ public static Collection<IndexUpgradeStatus> getUpgradeStatus(Client client, Str
}

// randomly distribute the files from src over dests paths
public static void copyIndex(final Logger logger, final Path src, final String indexName, final Path... dests) throws IOException {
public static void copyIndex(final Logger logger, final Path src, final String folderName, final Path... dests) throws IOException {
Path destinationDataPath = dests[randomInt(dests.length - 1)];
for (Path dest : dests) {
Path indexDir = dest.resolve(indexName);
Path indexDir = dest.resolve(folderName);
assertFalse(Files.exists(indexDir));
Files.createDirectories(indexDir);
}
@@ -140,7 +169,7 @@ public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
Path relativeDir = src.relativize(dir);
for (Path dest : dests) {
Path destDir = dest.resolve(indexName).resolve(relativeDir);
Path destDir = dest.resolve(folderName).resolve(relativeDir);
Files.createDirectories(destDir);
}
return FileVisitResult.CONTINUE;
@@ -155,7 +184,7 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO
}

Path relativeFile = src.relativize(file);
Path destFile = destinationDataPath.resolve(indexName).resolve(relativeFile);
Path destFile = destinationDataPath.resolve(folderName).resolve(relativeFile);
logger.trace("--> Moving {} to {}", relativeFile, destFile);
Files.move(file, destFile);
assertFalse(Files.exists(file));
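With getIndexDir and the folder-name-aware copyIndex in place, callers resolve the source folder first and copy it under its own on-disk name, because 5.0+ index folders are named by UUID rather than by index name. A sketch of the resulting call pattern, mirroring the IndexFolderUpgraderTests change above (logger, indexName, indexFile, clusterDir and dataPaths are placeholders):

    Path src = OldIndexUtils.getIndexDir(logger, indexName, indexFile, clusterDir);
    // copy under the folder's own name (the UUID for a 5.0+ index), not under the index name
    OldIndexUtils.copyIndex(logger, src, src.getFileName().toString(), dataPaths);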
