Skip to content

Placeholder fields - alternative impl #85600

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 16 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,15 @@
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperRegistry;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptCompiler;
import org.elasticsearch.script.ScriptService;
Expand Down Expand Up @@ -126,8 +129,10 @@ private static void checkSupportedVersion(IndexMetadata indexMetadata, Version m
* Note that we don't expect users to encounter mapping incompatibilities, since our index compatibility
* policy guarantees we can read mappings from previous compatible index versions. A failure here would
* indicate a compatibility bug (which are unfortunately not that uncommon).
* @return the merged mapping, or {@code null} if the index metadata contains no document mapper
*/
private void checkMappingsCompatibility(IndexMetadata indexMetadata) {
@Nullable
public Mapping checkMappingsCompatibility(IndexMetadata indexMetadata) {
try {

// We cannot instantiate real analysis server or similarity service at this point because the node
Expand Down Expand Up @@ -194,6 +199,8 @@ public Set<Entry<String, NamedAnalyzer>> entrySet() {
scriptService
);
mapperService.merge(indexMetadata, MapperService.MergeReason.MAPPING_RECOVERY);
DocumentMapper documentMapper = mapperService.documentMapper();
return documentMapper == null ? null : documentMapper.mapping();
}
} catch (Exception ex) {
// Wrap the inner exception so we have the index name in the exception message
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,10 @@ public BooleanFieldType(String name, boolean isIndexed, boolean hasDocValues) {
this(name, isIndexed, isIndexed, hasDocValues, false, null, Collections.emptyMap());
}

public BooleanFieldType(String name, Map<String, String> meta) {
this(name, false, false, true, false, null, meta);
}

@Override
public String typeName() {
return CONTENT_TYPE;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -411,6 +411,10 @@ public DateFieldType(String name, Resolution resolution, DateFormatter dateForma
this(name, true, false, true, dateFormatter, resolution, null, null, Collections.emptyMap());
}

public DateFieldType(String name, Resolution resolution, DateFormatter dateFormatter, Map<String, String> meta) {
this(name, false, false, true, dateFormatter, resolution, null, null, meta);
}

@Override
public String typeName() {
return resolution.type();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -458,14 +458,14 @@ public static MultiFields empty() {

public static class Builder {

private final Map<String, Function<MapperBuilderContext, FieldMapper>> mapperBuilders = new HashMap<>();
private final Map<String, Function<MapperBuilderContext, Mapper>> mapperBuilders = new HashMap<>();

public Builder add(FieldMapper.Builder builder) {
public Builder add(Mapper.Builder builder) {
mapperBuilders.put(builder.name(), builder::build);
return this;
}

public Builder add(FieldMapper mapper) {
public Builder add(Mapper mapper) {
mapperBuilders.put(mapper.simpleName(), context -> mapper);
return this;
}
Expand All @@ -474,7 +474,7 @@ public Builder update(FieldMapper toMerge, MapperBuilderContext context) {
if (mapperBuilders.containsKey(toMerge.simpleName()) == false) {
add(toMerge);
} else {
FieldMapper existing = mapperBuilders.get(toMerge.simpleName()).apply(context);
Mapper existing = mapperBuilders.get(toMerge.simpleName()).apply(context);
add(existing.merge(toMerge));
}
return this;
Expand All @@ -490,10 +490,13 @@ public MultiFields build(Mapper.Builder mainFieldBuilder, MapperBuilderContext c
} else {
Map<String, FieldMapper> mappers = new HashMap<>();
context = context.createChildContext(mainFieldBuilder.name());
for (Map.Entry<String, Function<MapperBuilderContext, FieldMapper>> entry : this.mapperBuilders.entrySet()) {
for (Map.Entry<String, Function<MapperBuilderContext, Mapper>> entry : this.mapperBuilders.entrySet()) {
String key = entry.getKey();
FieldMapper mapper = entry.getValue().apply(context);
mappers.put(key, mapper);
Mapper mapper = entry.getValue().apply(context);
if (mapper instanceof FieldMapper == false) {
throw new MapperParsingException("Field [" + mapper.name() + "] cannot be used in multi field");
}
mappers.put(key, (FieldMapper) mapper);
}
return new MultiFields(Collections.unmodifiableMap(mappers));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -303,6 +303,10 @@ public GeoPointFieldType(String name) {
this(name, true, false, true, null, null, Collections.emptyMap());
}

public GeoPointFieldType(String name, Map<String, String> meta) {
this(name, false, false, true, null, null, meta);
}

@Override
public String typeName() {
return CONTENT_TYPE;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,10 @@ public IpFieldType(String name, boolean isIndexed, boolean hasDocValues) {
this(name, isIndexed, false, hasDocValues, null, null, Collections.emptyMap(), false);
}

public IpFieldType(String name, Map<String, String> meta) {
this(name, false, false, true, null, null, meta, false);
}

@Override
public String typeName() {
return CONTENT_TYPE;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -343,7 +343,11 @@ public KeywordFieldType(String name, FieldType fieldType) {
}

public KeywordFieldType(String name, NamedAnalyzer analyzer) {
super(name, true, false, true, new TextSearchInfo(Defaults.FIELD_TYPE, null, analyzer, analyzer), Collections.emptyMap());
this(name, true, analyzer, Collections.emptyMap());
}

public KeywordFieldType(String name, boolean isIndexed, NamedAnalyzer analyzer, Map<String, String> meta) {
super(name, isIndexed, false, true, new TextSearchInfo(Defaults.FIELD_TYPE, null, analyzer, analyzer), meta);
this.normalizer = Lucene.KEYWORD_ANALYZER;
this.ignoreAbove = Integer.MAX_VALUE;
this.nullValue = null;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,213 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

package org.elasticsearch.index.mapper;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;

/**
 * Light-weight {@link Mapper.TypeParser}s for reading mappings of legacy (read-only) indices.
 * <p>
 * These parsers deliberately ignore most mapping parameters: they only reconstruct enough of a
 * {@link MappedFieldType} to support doc-value based value fetching. Field types without a
 * dedicated legacy parser — and fields with {@code doc_values: false} — fall back to a
 * placeholder field type.
 */
public final class LegacyMapperTypeParsers {

    private static final Logger logger = LogManager.getLogger(LegacyMapperTypeParsers.class);

    public static final LegacyMapperTypeParsers INSTANCE = new LegacyMapperTypeParsers(); // TODO make this pluggable?

    private LegacyMapperTypeParsers() {}

    /**
     * Returns the legacy parser for the given mapping type name, or the placeholder parser
     * for types that have no doc-value aware legacy implementation.
     *
     * @param fieldType the mapping {@code type} name, e.g. {@code "keyword"}
     * @return the parser to use for this legacy field type (never {@code null})
     */
    public Mapper.TypeParser getParser(String fieldType) {
        return switch (fieldType) {
            case "boolean" -> BOOLEAN;
            case "date", "date_nanos" -> DATE;
            case "half_float", "float", "double", "byte", "short", "integer", "long" -> NUMBER;
            case "geo_point" -> GEO_POINT;
            case "ip" -> IP;
            case "keyword" -> KEYWORD;
            default -> PLACEHOLDER;
        };
    }

    /**
     * Extracts the {@code meta} parameter from a mapping node.
     *
     * @return the {@code meta} map, or an empty map if the node has no {@code meta} entry
     * @throws MapperParsingException if {@code meta} is present but not a map
     */
    @SuppressWarnings("unchecked") // cast is checked by the instanceof guard above; value types come from parsed mappings
    private static Map<String, String> meta(Map<String, Object> node) {
        if (node.containsKey("meta") == false) {
            return Collections.emptyMap();
        }
        Object meta = node.get("meta");
        if (meta instanceof Map<?, ?> == false) {
            // BUGFIX: added the missing space before the offending value in the message
            throw new MapperParsingException("meta parameter must be a map, but found " + meta);
        }
        return (Map<String, String>) meta;
    }

    private static final LegacyTypeParser KEYWORD = new LegacyTypeParser() {
        @Override
        protected MappedFieldType doBuildMappedFieldType(String name, Map<String, Object> node, MappingParserContext context) {
            NamedAnalyzer analyzer = Lucene.KEYWORD_ANALYZER;
            String normalizer = XContentMapValues.nodeStringValue(node.get("normalizer"), "default");
            // BUGFIX: compare the configured normalizer name, not the literal "normalizer";
            // the original always attempted a lookup, even when no normalizer was configured.
            if (Objects.equals(normalizer, "default") == false) {
                analyzer = context.getIndexAnalyzers().getNormalizer(normalizer);
                if (analyzer == null) {
                    // The normalizer may no longer exist on this node; degrade gracefully instead of failing the mapping.
                    logger.warn(
                        new ParameterizedMessage("Could not find normalizer [{}] of legacy index, falling back to default", normalizer)
                    );
                    analyzer = Lucene.KEYWORD_ANALYZER;
                }
            }
            return new KeywordFieldMapper.KeywordFieldType(name, false, analyzer, meta(node));
        }
    };

    private static final LegacyTypeParser IP = new LegacyTypeParser() {
        @Override
        protected MappedFieldType doBuildMappedFieldType(String name, Map<String, Object> node, MappingParserContext context) {
            return new IpFieldMapper.IpFieldType(name, meta(node));
        }
    };

    private static final LegacyTypeParser BOOLEAN = new LegacyTypeParser() {
        @Override
        protected MappedFieldType doBuildMappedFieldType(String name, Map<String, Object> node, MappingParserContext context) {
            return new BooleanFieldMapper.BooleanFieldType(name, meta(node));
        }
    };

    private static final LegacyTypeParser DATE = new LegacyTypeParser() {
        @Override
        protected MappedFieldType doBuildMappedFieldType(String name, Map<String, Object> node, MappingParserContext context) {
            DateFieldMapper.Resolution resolution = DateFieldMapper.Resolution.MILLISECONDS;
            if (Objects.equals("date_nanos", node.get("type"))) {
                resolution = DateFieldMapper.Resolution.NANOSECONDS;
            }
            // Start from the context-wide formatter (if any), otherwise the resolution's default.
            DateFormatter dateFormatter = context.getDateFormatter();
            if (dateFormatter == null) {
                dateFormatter = resolution == DateFieldMapper.Resolution.NANOSECONDS
                    ? DateFieldMapper.DEFAULT_DATE_TIME_NANOS_FORMATTER
                    : DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER;
            }
            String format = XContentMapValues.nodeStringValue(node.get("format"));
            String locale = XContentMapValues.nodeStringValue(node.get("locale"));
            if (format != null || locale != null) {
                // Either parameter alone overrides just that aspect of the current formatter.
                format = format == null ? dateFormatter.pattern() : format;
                Locale l = locale == null ? dateFormatter.locale() : LocaleUtils.parse(locale);
                try {
                    dateFormatter = DateFormatter.forPattern(format).withLocale(l);
                } catch (IllegalArgumentException e) {
                    // The legacy format may be unsupported on this version; degrade gracefully.
                    logger.warn(new ParameterizedMessage("Error parsing format [{}] of legacy index, falling back to default", format), e);
                }
            }
            return new DateFieldMapper.DateFieldType(name, resolution, dateFormatter, meta(node));
        }
    };

    private static final LegacyTypeParser NUMBER = new LegacyTypeParser() {
        @Override
        protected MappedFieldType doBuildMappedFieldType(String name, Map<String, Object> node, MappingParserContext context) {
            // The mapping type name maps 1:1 onto the NumberType enum constant names.
            String t = (String) node.get("type");
            NumberFieldMapper.NumberType type = NumberFieldMapper.NumberType.valueOf(t.toUpperCase(Locale.ROOT));
            return new NumberFieldMapper.NumberFieldType(name, type, meta(node));
        }
    };

    private static final LegacyTypeParser GEO_POINT = new LegacyTypeParser() {
        @Override
        protected MappedFieldType doBuildMappedFieldType(String name, Map<String, Object> node, MappingParserContext context) {
            return new GeoPointFieldMapper.GeoPointFieldType(name, meta(node));
        }
    };

    private static final LegacyTypeParser PLACEHOLDER = new LegacyTypeParser() {
        @Override
        protected MappedFieldType doBuildMappedFieldType(String name, Map<String, Object> node, MappingParserContext context) {
            return new LegacyPlaceHolderFieldType(name, (String) node.get("type"), meta(node));
        }
    };

    /**
     * Base class for legacy type parsers. Handles multi-field parsing and the shared
     * {@code doc_values: false} fallback; subclasses build the concrete field type.
     */
    abstract static class LegacyTypeParser implements Mapper.TypeParser {

        @Override
        public final Mapper.Builder parse(String name, Map<String, Object> node, MappingParserContext parserContext)
            throws MapperParsingException {
            FieldMapper.MultiFields.Builder multiFieldsBuilder = new FieldMapper.MultiFields.Builder();
            if (node.containsKey("fields")) {
                TypeParsers.parseMultiField(multiFieldsBuilder::add, name, parserContext, "fields", node.get("fields"));
                node.remove("fields");
            }
            Map<String, Object> nodeCopy = new LinkedHashMap<>(node); // preserve order
            node.clear(); // we may ignore some params during parsing of legacy mappings, so we just clear everything here
            return new Mapper.Builder(name) {
                @Override
                public Mapper build(MapperBuilderContext context) {
                    FieldMapper.MultiFields multiFields = multiFieldsBuilder.build(this, context);
                    MappedFieldType fieldType = buildMappedFieldType(context.buildFullName(name), nodeCopy, parserContext);
                    return new LegacyFieldMapper(name, fieldType, multiFields, nodeCopy);
                }
            };
        }

        /**
         * Builds the field type, falling back to a placeholder when doc values are disabled,
         * since legacy field types can only fetch values from doc values.
         */
        protected final MappedFieldType buildMappedFieldType(String name, Map<String, Object> node, MappingParserContext context) {
            if (XContentMapValues.nodeBooleanValue(node.get("doc_values"), "doc_values", true) == false) {
                return new LegacyPlaceHolderFieldType(name, (String) node.get("type"), meta(node));
            }
            return doBuildMappedFieldType(name, node, context);
        }

        protected abstract MappedFieldType doBuildMappedFieldType(String name, Map<String, Object> node, MappingParserContext context);
    }

    /**
     * A read-only field mapper for legacy indices: it keeps the original mapping parameters
     * verbatim for serialization, but supports neither indexing new documents nor merging.
     */
    private static class LegacyFieldMapper extends FieldMapper {

        // Original mapping parameters, preserved verbatim (incl. "type") for toXContent round-tripping.
        final Map<String, Object> params;

        protected LegacyFieldMapper(
            String simpleName,
            MappedFieldType mappedFieldType,
            MultiFields multiFields,
            Map<String, Object> params
        ) {
            super(simpleName, mappedFieldType, multiFields, CopyTo.empty()); // TODO do we need copy_to for value fetching?
            this.params = params;
        }

        @Override
        protected void parseCreateField(DocumentParserContext context) throws IOException {
            // BUGFIX: corrected typo in the message ("to not" -> "do not")
            throw new UnsupportedOperationException("Legacy field mappers do not support indexing");
        }

        @Override
        public Builder getMergeBuilder() {
            throw new UnsupportedOperationException("Legacy field mappers do not support merging");
        }

        @Override
        protected String contentType() {
            return (String) params.get("type");
        }

        @Override
        protected void doXContentBody(XContentBuilder builder, Params params) throws IOException {
            // Re-emit the legacy parameters exactly as they were parsed.
            for (var entry : this.params.entrySet()) {
                builder.field(entry.getKey(), entry.getValue());
            }
            multiFields.toXContent(builder, params);
        }
    }

}
Loading