Disable fielddata on text fields by default. #17386

Merged
@@ -43,7 +43,6 @@
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;

/** A {@link FieldMapper} for full-text fields. */
@@ -60,7 +59,6 @@ public static class Defaults {
public static final MappedFieldType FIELD_TYPE = new TextFieldType();

static {
- FIELD_TYPE.setTokenized(true);
FIELD_TYPE.freeze();
}

@@ -177,8 +175,8 @@ public static final class TextFieldType extends MappedFieldType {
private int fielddataMinSegmentSize;

public TextFieldType() {
- // TODO: change the default to false
- fielddata = true;
+ setTokenized(true);
+ fielddata = false;
fielddataMinFrequency = Defaults.FIELDDATA_MIN_FREQUENCY;
fielddataMaxFrequency = Defaults.FIELDDATA_MAX_FREQUENCY;
fielddataMinSegmentSize = Defaults.FIELDDATA_MIN_SEGMENT_SIZE;
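Reviewer note: this constructor change is the heart of the PR. New TextFieldType instances default to fielddata = false, so sorting, aggregating, or scripting on an analyzed text field now fails with an IllegalStateException unless the mapping opts in. A minimal sketch of opting back in, in the same XContentBuilder style the tests below use (index, type, and field names are illustrative, not from this PR):

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

// Inside an ESIntegTestCase, where client() and assertAcked() are available.
XContentBuilder mapping = XContentFactory.jsonBuilder()
    .startObject().startObject("my_type").startObject("properties")
        .startObject("body")
            .field("type", "text")
            .field("fielddata", true) // opt in to in-heap fielddata for this one field
        .endObject()
    .endObject().endObject().endObject();
assertAcked(client().admin().indices().prepareCreate("my_index").addMapping("my_type", mapping));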
@@ -163,7 +163,7 @@ public void testEmptyFilter() throws Exception {

public void testSearchingFilteringAliasesSingleIndex() throws Exception {
logger.info("--> creating index [test]");
assertAcked(prepareCreate("test").addMapping("type1", "id", "type=text", "name", "type=text"));
assertAcked(prepareCreate("test").addMapping("type1", "id", "type=text", "name", "type=text,fielddata=true"));

ensureGreen();

@@ -35,6 +35,7 @@ public void testEagerGlobalOrdinalsFieldDataLoading() throws Exception {
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("name")
.field("type", "text")
.field("fielddata", true)
.field("eager_global_ordinals", true)
.endObject()
.endObject().endObject().endObject()));
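Eager global ordinals are built from fielddata, so eager loading is only meaningful once fielddata is enabled, hence the extra field above. Assuming the simplified key=value mapping helper used elsewhere in these tests also accepts this property (not verified here), the shorthand equivalent would be:

assertAcked(prepareCreate("test").addMapping("type", "name", "type=text,fielddata=true,eager_global_ordinals=true"));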
@@ -64,6 +64,7 @@ public void testFilterByFrequency() throws Exception {
{
ifdService.clear();
MappedFieldType ft = new TextFieldMapper.Builder("high_freq")
+ .fielddata(true)
.fielddataFrequencyFilter(0, random.nextBoolean() ? 100 : 0.5d, 0)
.build(builderCtx).fieldType();
IndexOrdinalsFieldData fieldData = ifdService.getForField(ft);
@@ -76,6 +77,7 @@
{
ifdService.clear();
MappedFieldType ft = new TextFieldMapper.Builder("high_freq")
+ .fielddata(true)
.fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d/200.0d, 201, 100)
.build(builderCtx).fieldType();
IndexOrdinalsFieldData fieldData = ifdService.getForField(ft);
@@ -88,6 +90,7 @@
{
ifdService.clear(); // test # docs with value
MappedFieldType ft = new TextFieldMapper.Builder("med_freq")
+ .fielddata(true)
.fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d/200.0d, Integer.MAX_VALUE, 101)
.build(builderCtx).fieldType();
IndexOrdinalsFieldData fieldData = ifdService.getForField(ft);
@@ -101,6 +104,7 @@
{
ifdService.clear();
MappedFieldType ft = new TextFieldMapper.Builder("med_freq")
+ .fielddata(true)
.fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d/200.0d, Integer.MAX_VALUE, 101)
.build(builderCtx).fieldType();
IndexOrdinalsFieldData fieldData = ifdService.getForField(ft);
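Each of these blocks now opts in before requesting fielddata, because a frequency filter only shapes what gets loaded; it does not enable loading. Conversely, a plain text field should now fail as soon as fielddata is requested. A sketch against the same fixtures ("plain" is a hypothetical field name, and this assumes getForField() routes through fielddataBuilder()):

ifdService.clear();
MappedFieldType ft = new TextFieldMapper.Builder("plain") // fielddata left at the new default of false
    .build(builderCtx).fieldType();
expectThrows(IllegalStateException.class, () -> ifdService.getForField(ft));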
@@ -102,7 +102,8 @@ private XContentBuilder createDynamicTemplateMapping() throws IOException {
.startObject().startObject("template_all")
.field("match", "*")
.field("match_mapping_type", "string")
.startObject("mapping").field("type", "text").field("copy_to", "{name}_raw").endObject()
.startObject("mapping").field("type", "text").field("fielddata", true)
.field("copy_to", "{name}_raw").endObject()
.endObject().endObject()

.endArray();
@@ -405,22 +405,33 @@ public void testFielddata() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "text")
.field("fielddata", false)
.endObject().endObject()
.endObject().endObject().string();

- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
-
- assertEquals(mapping, mapper.mappingSource().toString());
+ DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(mapping));
+ assertEquals(mapping, disabledMapper.mappingSource().toString());
IllegalStateException e = expectThrows(IllegalStateException.class,
- () -> mapper.mappers().getMapper("field").fieldType().fielddataBuilder());
+ () -> disabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder());
assertThat(e.getMessage(), containsString("Fielddata is disabled"));

+ mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject("properties").startObject("field")
+ .field("type", "text")
+ .field("fielddata", true)
+ .endObject().endObject()
+ .endObject().endObject().string();
+
+ DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(mapping));
+
+ assertEquals(mapping, enabledMapper.mappingSource().toString());
+ enabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder(); // no exception this time
}

public void testFrequencyFilter() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "text")
.field("fielddata", true)
.startObject("fielddata_frequency_filter")
.field("min", 2d)
.field("min_segment_size", 1000)
@@ -103,7 +103,8 @@ public void testMemoryBreaker() throws Exception {
logger.info("--> noop breakers used, skipping test");
return;
}
assertAcked(prepareCreate("cb-test", 1, settingsBuilder().put(SETTING_NUMBER_OF_REPLICAS, between(0, 1))));
assertAcked(prepareCreate("cb-test", 1, settingsBuilder().put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)))
.addMapping("type", "test", "type=text,fielddata=true"));
final Client client = client();

// index some different terms so we have some field data for loading
@@ -148,7 +149,7 @@ public void testRamAccountingTermsEnum() throws Exception {

// Create an index where the mappings have a field data filter
assertAcked(prepareCreate("ramtest").setSource("{\"mappings\": {\"type\": {\"properties\": {\"test\": " +
"{\"type\": \"text\",\"fielddata_frequency_filter\": {\"max\": 10000}}}}}}"));
"{\"type\": \"text\",\"fielddata\": true,\"fielddata_frequency_filter\": {\"max\": 10000}}}}}}"));

ensureGreen("ramtest");

@@ -197,7 +198,8 @@ public void testParentChecking() throws Exception {
logger.info("--> noop breakers used, skipping test");
return;
}
assertAcked(prepareCreate("cb-test", 1, settingsBuilder().put(SETTING_NUMBER_OF_REPLICAS, between(0, 1))));
assertAcked(prepareCreate("cb-test", 1, settingsBuilder().put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)))
.addMapping("type", "test", "type=text,fielddata=true"));
Client client = client();

// index some different terms so we have some field data for loading
@@ -58,10 +58,7 @@
import java.util.EnumSet;
import java.util.Random;

import static org.elasticsearch.cluster.metadata.IndexMetaData.PROTO;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.contentBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
@@ -97,7 +94,10 @@ private Settings.Builder settingsBuilder() {
}

public void testFieldDataStats() {
client().admin().indices().prepareCreate("test").setSettings(settingsBuilder().put("index.number_of_shards", 2)).execute().actionGet();
assertAcked(client().admin().indices().prepareCreate("test")
.setSettings(settingsBuilder().put("index.number_of_shards", 2))
.addMapping("type", "field", "type=text,fielddata=true",
"field2", "type=text,fielddata=true").get());
ensureGreen();
client().prepareIndex("test", "type", "1").setSource("field", "value1", "field2", "value1").execute().actionGet();
client().prepareIndex("test", "type", "2").setSource("field", "value2", "field2", "value2").execute().actionGet();
@@ -141,9 +141,9 @@ public void testFieldDataStats() {
}

public void testClearAllCaches() throws Exception {
client().admin().indices().prepareCreate("test")
assertAcked(client().admin().indices().prepareCreate("test")
.setSettings(settingsBuilder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2))
.execute().actionGet();
.addMapping("type", "field", "type=text,fielddata=true").get());
ensureGreen();
client().admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client().prepareIndex("test", "type", "1").setSource("field", "value1").execute().actionGet();
@@ -719,7 +719,9 @@ public void testMultiIndex() throws Exception {
}

public void testFieldDataFieldsParam() throws Exception {
createIndex("test1");
assertAcked(client().admin().indices().prepareCreate("test1")
.addMapping("type", "bar", "type=text,fielddata=true",
"baz", "type=text,fielddata=true").get());

ensureGreen();
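testFieldDataFieldsParam exercises the per-field fielddata section of the stats API, which is only populated for fields that have fielddata enabled and loaded. A sketch of reading those stats back, using the field names mapped above:

IndicesStatsResponse stats = client().admin().indices().prepareStats("test1")
    .setFieldData(true)
    .setFieldDataFields("bar", "baz")
    .get();
// stats.getTotal().getFieldData().getFields() should now report per-field memory use.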

@@ -54,7 +54,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {

// Just test the integration with facets and aggregations, not the facet and aggregation functionality!
public void testAggregations() throws Exception {
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text", "field2", "type=text"));
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text", "field2", "type=keyword"));
ensureGreen();

int numQueries = scaledRandomIntBetween(250, 500);
@@ -119,7 +119,7 @@ public void testAggregations() throws Exception {

// Just test the integration with facets and aggregations, not the facet and aggregation functionality!
public void testAggregationsAndPipelineAggregations() throws Exception {
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text", "field2", "type=text"));
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text", "field2", "type=keyword"));
ensureGreen();

int numQueries = scaledRandomIntBetween(250, 500);
@@ -204,7 +204,7 @@ public void testSignificantAggs() throws Exception {

public void testSingleShardAggregations() throws Exception {
assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1))
.addMapping("type", "field1", "type=text", "field2", "type=text"));
.addMapping("type", "field1", "type=text", "field2", "type=keyword"));
ensureGreen();

int numQueries = scaledRandomIntBetween(250, 500);
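A recurring pattern in this PR: tests that only aggregate on exact values switch the field to keyword, which is backed by doc values and needs no heap-resident fielddata; text plus fielddata=true is reserved for cases that genuinely aggregate on analyzed terms. Side by side, as a sketch:

assertAcked(prepareCreate("test").addMapping("type",
    "exact", "type=keyword",                  // aggregate/sort on whole values via doc values
    "analyzed", "type=text,fielddata=true")); // aggregate on analyzed terms, builds in-heap fielddata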
@@ -40,7 +40,7 @@ public class AggregationsIntegrationIT extends ESIntegTestCase {

@Override
public void setupSuiteScopeCluster() throws Exception {
assertAcked(prepareCreate("index").addMapping("type", "f", "type=text").get());
assertAcked(prepareCreate("index").addMapping("type", "f", "type=keyword").get());
ensureYellow("index");
numDocs = randomIntBetween(1, 20);
List<IndexRequestBuilder> docs = new ArrayList<>();
@@ -117,7 +117,7 @@ public void testSubAggregationForTopAggregationOnUnmappedField() throws Exception {
prepareCreate("idx").addMapping("type", jsonBuilder()
.startObject()
.startObject("type").startObject("properties")
.startObject("name").field("type", "text").endObject()
.startObject("name").field("type", "keyword").endObject()
.startObject("value").field("type", "integer").endObject()
.endObject().endObject()
.endObject()).execute().actionGet();
@@ -34,13 +34,15 @@
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.maxBucket;
+ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;


public class MetaDataIT extends ESIntegTestCase {

public void testMetaDataSetOnAggregationResult() throws Exception {
createIndex("idx");
assertAcked(client().admin().indices().prepareCreate("idx")
.addMapping("type", "name", "type=keyword").get());
IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)];
for (int i = 0; i < builders.length; i++) {
String name = "name_" + randomIntBetween(1, 10);
@@ -54,7 +54,7 @@ protected int maximumNumberOfShards() {

@Override
protected void setupSuiteScopeCluster() throws Exception {
assertAcked(prepareCreate("idx").addMapping("type", "date", "type=date", "location", "type=geo_point").get());
assertAcked(prepareCreate("idx").addMapping("type", "date", "type=date", "location", "type=geo_point", "str", "type=keyword").get());
indexRandom(true,
client().prepareIndex("idx", "type", "1").setSource(),
client().prepareIndex("idx", "type", "2").setSource("str", "foo", "long", 3L, "double", 5.5, "date", "2015-05-07", "location", "1,2"));
@@ -66,8 +66,8 @@ public class ChildrenIT extends ESIntegTestCase {
public void setupSuiteScopeCluster() throws Exception {
assertAcked(
prepareCreate("test")
.addMapping("article")
.addMapping("comment", "_parent", "type=article")
.addMapping("article", "category", "type=keyword")
.addMapping("comment", "_parent", "type=article", "commenter", "type=keyword")
);

List<IndexRequestBuilder> requests = new ArrayList<>();
@@ -295,8 +295,8 @@ public void testPostCollection() throws Exception {
String childType = "variantsku";
assertAcked(
prepareCreate(indexName)
.addMapping(masterType, "brand", "type=text", "name", "type=text", "material", "type=text")
.addMapping(childType, "_parent", "type=masterprod", "color", "type=text", "size", "type=text")
.addMapping(masterType, "brand", "type=text", "name", "type=keyword", "material", "type=text")
.addMapping(childType, "_parent", "type=masterprod", "color", "type=keyword", "size", "type=keyword")
);

List<IndexRequestBuilder> requests = new ArrayList<>();
@@ -358,7 +358,7 @@ public void testHierarchicalChildrenAggs() {
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
)
- .addMapping(grandParentType)
+ .addMapping(grandParentType, "name", "type=keyword")
.addMapping(parentType, "_parent", "type=" + grandParentType)
.addMapping(childType, "_parent", "type=" + parentType)
);
@@ -34,6 +34,7 @@
import static org.elasticsearch.search.aggregations.AggregationBuilders.avg;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.missing;
+ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@@ -49,7 +50,8 @@ public class MissingIT extends ESIntegTestCase {

@Override
public void setupSuiteScopeCluster() throws Exception {
createIndex("idx");
assertAcked(client().admin().indices().prepareCreate("idx")
.addMapping("type", "tag", "type=keyword").get());
List<IndexRequestBuilder> builders = new ArrayList<>();
numDocs = randomIntBetween(5, 20);
numDocsMissing = randomIntBetween(1, numDocs - 1);
@@ -39,6 +39,7 @@
import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
+ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.core.IsNull.notNullValue;

@@ -108,7 +109,8 @@ public String sortKey() {

@Override
public void setupSuiteScopeCluster() throws Exception {
createIndex("idx");
assertAcked(client().admin().indices().prepareCreate("idx")
.addMapping("type", "string_value", "type=keyword").get());
final int numDocs = randomIntBetween(2, 10);
for (int i = 0; i < numDocs; ++i) {
final long value = randomInt(5);
@@ -297,7 +297,7 @@ public void testNestedAsSubAggregation() throws Exception {
public void testNestNestedAggs() throws Exception {
SearchResponse response = client().prepareSearch("idx_nested_nested_aggs")
.addAggregation(nested("level1", "nested1")
.subAggregation(terms("a").field("nested1.a")
.subAggregation(terms("a").field("nested1.a.keyword")
.collectMode(aggCollectionMode)
.subAggregation(nested("level2", "nested1.nested2")
.subAggregation(sum("sum").field("nested1.nested2.b")))))
@@ -463,7 +463,7 @@ public void testNestedSameDocIdProcessedMultipleTime() throws Exception {
assertAcked(
prepareCreate("idx4")
.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))
.addMapping("product", "categories", "type=text", "name", "type=text", "property", "type=nested")
.addMapping("product", "categories", "type=keyword", "name", "type=text", "property", "type=nested")
);
ensureGreen("idx4");
