Migrate index created version to IndexVersion (#96066)
thecoop authored Jun 14, 2023
1 parent: ed827ad · commit: 71c1226
Showing 161 changed files with 765 additions and 685 deletions.
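
Every hunk below makes the same mechanical substitution: call sites that previously passed the node-level org.elasticsearch.Version now pass org.elasticsearch.index.IndexVersion, and checks against the version an index was created with compare IndexVersion constants (IndexVersion.CURRENT, IndexVersion.V_7_6_0, IndexVersion.V_8_0_0) rather than their Version counterparts. The snippet below is a minimal sketch of that pattern, not code from this commit; the LegacyNameCheck class and its rejectLegacyNGramName method are hypothetical and exist only to show the shape of the change.

import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;

// Hypothetical helper illustrating the migrated check. Before this commit the
// condition read indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_8_0_0);
// after it, the index-created version is compared against IndexVersion constants.
final class LegacyNameCheck {
    static void rejectLegacyNGramName(IndexSettings indexSettings) {
        if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) {
            throw new IllegalArgumentException(
                "The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices."
            );
        }
    }
}
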
@@ -22,9 +22,9 @@
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.fielddata.FieldDataContext;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@@ -148,7 +148,7 @@ public TopDocs benchmark() throws IOException {

private Query scriptScoreQuery(ScoreScript.Factory factory) {
ScoreScript.LeafFactory leafFactory = factory.newFactory(Map.of(), lookup);
return new ScriptScoreQuery(new MatchAllDocsQuery(), null, leafFactory, lookup, null, "test", 0, Version.CURRENT);
return new ScriptScoreQuery(new MatchAllDocsQuery(), null, leafFactory, lookup, null, "test", 0, IndexVersion.CURRENT);
}

private ScoreScript.Factory bareMetalScript() {
@@ -9,7 +9,7 @@
package org.elasticsearch.benchmark.vector;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.script.field.vectors.BinaryDenseVector;
import org.elasticsearch.script.field.vectors.ByteBinaryDenseVector;
import org.elasticsearch.script.field.vectors.ByteKnnDenseVector;
@@ -242,7 +242,7 @@ private DotBinaryFloatBenchmarkFunction(int dims) {

@Override
public void execute(Consumer<Object> consumer) {
new BinaryDenseVector(docFloatVector, docVector, dims, Version.CURRENT).dotProduct(queryVector);
new BinaryDenseVector(docFloatVector, docVector, dims, IndexVersion.CURRENT).dotProduct(queryVector);
}
}

@@ -290,7 +290,7 @@ private CosineBinaryFloatBenchmarkFunction(int dims) {

@Override
public void execute(Consumer<Object> consumer) {
new BinaryDenseVector(docFloatVector, docVector, dims, Version.CURRENT).cosineSimilarity(queryVector, false);
new BinaryDenseVector(docFloatVector, docVector, dims, IndexVersion.CURRENT).cosineSimilarity(queryVector, false);
}
}

@@ -338,7 +338,7 @@ private L1BinaryFloatBenchmarkFunction(int dims) {

@Override
public void execute(Consumer<Object> consumer) {
new BinaryDenseVector(docFloatVector, docVector, dims, Version.CURRENT).l1Norm(queryVector);
new BinaryDenseVector(docFloatVector, docVector, dims, IndexVersion.CURRENT).l1Norm(queryVector);
}
}

@@ -386,7 +386,7 @@ private L2BinaryFloatBenchmarkFunction(int dims) {

@Override
public void execute(Consumer<Object> consumer) {
new BinaryDenseVector(docFloatVector, docVector, dims, Version.CURRENT).l1Norm(queryVector);
new BinaryDenseVector(docFloatVector, docVector, dims, IndexVersion.CURRENT).l1Norm(queryVector);
}
}

@@ -112,6 +112,7 @@
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.analysis.AnalyzerProvider;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory;
@@ -260,7 +261,7 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return new EdgeNGramTokenFilterFactory(indexSettings, environment, name, settings) {
@Override
public TokenStream create(TokenStream tokenStream) {
if (indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_8_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [edge_ngram] instead."
@@ -301,7 +302,7 @@ public TokenStream create(TokenStream tokenStream) {
return new NGramTokenFilterFactory(indexSettings, environment, name, settings) {
@Override
public TokenStream create(TokenStream tokenStream) {
if (indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_8_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) {
throw new IllegalArgumentException(
"The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [ngram] instead."
@@ -381,12 +382,12 @@ public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new);
tokenizers.put("thai", ThaiTokenizerFactory::new);
tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_8_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) {
throw new IllegalArgumentException(
"The [nGram] tokenizer name was deprecated in 7.6. "
+ "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead."
);
} else if (indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_7_6_0)) {
} else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_7_6_0)) {
deprecationLogger.warn(
DeprecationCategory.ANALYSIS,
"nGram_tokenizer_deprecation",
@@ -398,12 +399,12 @@ public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
});
tokenizers.put("ngram", NGramTokenizerFactory::new);
tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_8_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] tokenizer name was deprecated in 7.6. "
+ "Please use the tokenizer name to [edge_nGram] for indices created in versions 8 or higher instead."
);
} else if (indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_7_6_0)) {
} else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_7_6_0)) {
deprecationLogger.warn(
DeprecationCategory.ANALYSIS,
"edgeNGram_tokenizer_deprecation",
@@ -8,7 +8,6 @@

package org.elasticsearch.datastreams;

import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@@ -38,6 +37,7 @@
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettingProviders;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MapperBuilderContext;
import org.elasticsearch.index.mapper.Mapping;
@@ -220,7 +220,7 @@ public void setup() throws Exception {
null,
ScriptCompiler.NONE,
false,
Version.CURRENT
IndexVersion.CURRENT
).build(MapperBuilderContext.root(false));
RootObjectMapper.Builder root = new RootObjectMapper.Builder("_doc", ObjectMapper.Defaults.SUBOBJECTS);
root.add(
@@ -230,7 +230,7 @@ public void setup() throws Exception {
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
ScriptCompiler.NONE,
true,
Version.CURRENT
IndexVersion.CURRENT
)
);
MetadataFieldMapper dtfm = DataStreamTestHelper.getDataStreamTimestampFieldMapper();
@@ -7,10 +7,10 @@
*/
package org.elasticsearch.datastreams.mapper;

import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.datastreams.DataStreamsPlugin;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DocumentMapper;
@@ -170,7 +170,7 @@ public void testValidateDefaultIgnoreMalformed() throws Exception {
Settings indexSettings = Settings.builder().put(FieldMapper.IGNORE_MALFORMED_SETTING.getKey(), true).build();
Exception e = expectThrows(
IllegalArgumentException.class,
() -> createMapperService(Version.CURRENT, indexSettings, () -> true, timestampMapping(true, b -> {
() -> createMapperService(IndexVersion.CURRENT, indexSettings, () -> true, timestampMapping(true, b -> {
b.startObject("@timestamp");
b.field("type", "date");
b.endObject();
@@ -181,7 +181,7 @@ public void testValidateDefaultIgnoreMalformed() throws Exception {
equalTo("data stream timestamp field [@timestamp] has disallowed [ignore_malformed] attribute specified")
);

MapperService mapperService = createMapperService(Version.CURRENT, indexSettings, () -> true, timestampMapping(true, b -> {
MapperService mapperService = createMapperService(IndexVersion.CURRENT, indexSettings, () -> true, timestampMapping(true, b -> {
b.startObject("@timestamp");
b.field("type", "date");
b.field("ignore_malformed", false);
@@ -17,7 +17,6 @@
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.GeometryFormatterFactory;
@@ -29,6 +28,7 @@
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper;
import org.elasticsearch.index.mapper.DocumentParserContext;
@@ -220,9 +220,9 @@ public static class Builder extends FieldMapper.Builder {

Parameter<Map<String, String>> meta = Parameter.metaParam();

private final Version indexCreatedVersion;
private final IndexVersion indexCreatedVersion;

public Builder(String name, Version version, boolean ignoreMalformedByDefault, boolean coerceByDefault) {
public Builder(String name, IndexVersion version, boolean ignoreMalformedByDefault, boolean coerceByDefault) {
super(name);

if (ShapesAvailability.JTS_AVAILABLE == false || ShapesAvailability.SPATIAL4J_AVAILABLE == false) {
@@ -243,7 +243,7 @@ public Builder(String name, Version version, boolean ignoreMalformedByDefault, b
});

// Set up serialization
if (version.onOrAfter(Version.V_7_0_0)) {
if (version.onOrAfter(IndexVersion.V_7_0_0)) {
this.strategy.alwaysSerialize();
}
// serialize treeLevels if treeLevels is configured, OR if defaults are requested and precision is not configured
@@ -542,7 +542,7 @@ public PrefixTreeStrategy resolvePrefixTreeStrategy(String strategyName) {
}
}

private final Version indexCreatedVersion;
private final IndexVersion indexCreatedVersion;
private final Builder builder;

public LegacyGeoShapeFieldMapper(
@@ -20,6 +20,7 @@
import org.elasticsearch.geometry.Line;
import org.elasticsearch.geometry.MultiLine;
import org.elasticsearch.geometry.MultiPoint;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.mapper.MapperBuilderContext;
import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper;
import org.elasticsearch.legacygeo.parsers.ShapeParser;
@@ -384,7 +385,7 @@ public void testParse3DPolygon() throws IOException, ParseException {
Version randomVersion = VersionUtils.randomIndexCompatibleVersion(random());
LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null);
final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0);
final IndexVersion version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0).indexVersion;
final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build(
MapperBuilderContext.root(false)
);
@@ -18,6 +18,7 @@
import org.elasticsearch.geometry.Line;
import org.elasticsearch.geometry.MultiLine;
import org.elasticsearch.geometry.MultiPoint;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.mapper.MapperBuilderContext;
import org.elasticsearch.legacygeo.builders.CoordinatesBuilder;
import org.elasticsearch.legacygeo.builders.EnvelopeBuilder;
@@ -292,9 +293,8 @@ public void testParseMixedDimensionPolyWithHole() throws IOException, ParseExcep
XContentParser parser = createParser(xContentBuilder);
parser.nextToken();

final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", Version.CURRENT, false, true).build(
MapperBuilderContext.root(false)
);
final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", IndexVersion.CURRENT, false, true)
.build(MapperBuilderContext.root(false));

// test store z disabled
ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> ShapeParser.parse(parser, mapperBuilder));
@@ -324,7 +324,7 @@ public void testParseMixedDimensionPolyWithHoleStoredZ() throws IOException {
XContentParser parser = createParser(xContentBuilder);
parser.nextToken();

final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0);
final IndexVersion version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0).indexVersion;
final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build(
MapperBuilderContext.root(false)
);
Expand All @@ -348,7 +348,7 @@ public void testParsePolyWithStoredZ() throws IOException {
XContentParser parser = createParser(xContentBuilder);
parser.nextToken();

final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0);
final IndexVersion version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0).indexVersion;
final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build(
MapperBuilderContext.root(false)
);
@@ -364,7 +364,7 @@ public void testParseOpenPolygon() throws IOException {
XContentParser parser = createParser(xContentBuilder);
parser.nextToken();

final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0);
final IndexVersion version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0).indexVersion;
final LegacyGeoShapeFieldMapper defaultMapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).coerce(
false
).build(MapperBuilderContext.root(false));
@@ -374,9 +374,12 @@ public void testParseOpenPolygon() throws IOException {
);
assertEquals("invalid LinearRing found (coordinates are not closed)", exception.getMessage());

final LegacyGeoShapeFieldMapper coercingMapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", Version.CURRENT, false, true)
.coerce(true)
.build(MapperBuilderContext.root(false));
final LegacyGeoShapeFieldMapper coercingMapperBuilder = new LegacyGeoShapeFieldMapper.Builder(
"test",
IndexVersion.CURRENT,
false,
true
).coerce(true).build(MapperBuilderContext.root(false));
ShapeBuilder<?, ?, ?> shapeBuilder = ShapeParser.parse(parser, coercingMapperBuilder);
assertNotNull(shapeBuilder);
assertEquals("polygon ((100.0 5.0, 100.0 10.0, 90.0 10.0, 90.0 5.0, 100.0 5.0))", shapeBuilder.toWKT());
@@ -21,6 +21,7 @@
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.geometry.Point;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@@ -108,8 +109,8 @@ protected boolean supportsMeta() {
}

@Override
protected Version getVersion() {
return VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0);
protected IndexVersion getVersion() {
return VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0).indexVersion;
}

public void testLegacySwitches() throws IOException {
@@ -9,6 +9,7 @@

import org.elasticsearch.Version;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperBuilderContext;
@@ -35,7 +36,7 @@ public void testSetStrategyName() {
}

public void testFetchSourceValue() throws IOException {
Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0);
IndexVersion version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0).indexVersion;
MappedFieldType mapper = new LegacyGeoShapeFieldMapper.Builder("field", version, false, true).build(
MapperBuilderContext.root(false)
).fieldType();