Star tree index validations
Signed-off-by: Bharathwaj G <bharath78910@gmail.com>
bharath-techie committed Aug 30, 2024
1 parent 23cba28 commit 21cf04f
Showing 13 changed files with 516 additions and 25 deletions.

Large diffs are not rendered by default.

@@ -767,6 +767,7 @@ public void apply(Settings value, Settings current, Settings previous) {

// Composite index settings
CompositeIndexSettings.STAR_TREE_INDEX_ENABLED_SETTING,
CompositeIndexSettings.COMPOSITE_INDEX_MAX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING,

SystemTemplatesService.SETTING_APPLICATION_BASED_CONFIGURATION_TEMPLATES_ENABLED,

@@ -248,6 +248,8 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
StarTreeIndexSettings.DEFAULT_METRICS_LIST,
StarTreeIndexSettings.DEFAULT_DATE_INTERVALS,
StarTreeIndexSettings.STAR_TREE_MAX_DATE_INTERVALS_SETTING,
StarTreeIndexSettings.STAR_TREE_MAX_BASE_METRICS_SETTING,
StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING,

// validate that built-in similarities don't get redefined
Setting.groupSetting("index.similarity.", (s) -> {
11 changes: 11 additions & 0 deletions server/src/main/java/org/opensearch/common/settings/Setting.java
@@ -2079,6 +2079,17 @@ public static Setting<ByteSizeValue> byteSizeSetting(
return byteSizeSetting(key, (s) -> defaultValue.getStringRep(), minValue, maxValue, properties);
}

public static Setting<ByteSizeValue> byteSizeSetting(
String key,
ByteSizeValue defaultValue,
ByteSizeValue minValue,
ByteSizeValue maxValue,
Validator<ByteSizeValue> validator,
Property... properties
) {
return new Setting<>(key, defaultValue.getStringRep(), new ByteSizeValueParser(minValue, maxValue, key), validator, properties);
}

public static Setting<ByteSizeValue> byteSizeSetting(
String key,
Function<Settings, String> defaultValue,
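
As a quick illustration (not part of this commit), here is how the new overload above can be used to attach a validator to a byte-size setting. The setting key, default, and bounds below are hypothetical; the real usage added by this commit is the translog flush threshold setting in IndexSettings further down.

import org.opensearch.common.settings.Setting;
import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.core.common.unit.ByteSizeValue;

public class ExampleSettings {
    // Hypothetical byte-size setting using the new overload; the lambda satisfies the
    // Validator's single abstract method. Override validate(value, settings) and settings()
    // instead when cross-setting checks are needed, as IndexSettings does below.
    public static final Setting<ByteSizeValue> EXAMPLE_FLUSH_THRESHOLD_SETTING = Setting.byteSizeSetting(
        "index.example.flush_threshold_size",
        new ByteSizeValue(256, ByteSizeUnit.MB),
        new ByteSizeValue(1, ByteSizeUnit.MB),
        new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES),
        value -> {}, // no standalone validation in this sketch
        Setting.Property.Dynamic,
        Setting.Property.IndexScope
    );
}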
43 changes: 42 additions & 1 deletion server/src/main/java/org/opensearch/index/IndexSettings.java
@@ -48,6 +48,7 @@
import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.index.Index;
import org.opensearch.index.compositeindex.datacube.startree.StarTreeIndexSettings;
import org.opensearch.index.remote.RemoteStorePathStrategy;
import org.opensearch.index.remote.RemoteStoreUtils;
import org.opensearch.index.translog.Translog;
@@ -59,8 +60,10 @@

import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
@@ -70,6 +73,7 @@
import static org.opensearch.Version.V_2_7_0;
import static org.opensearch.common.util.FeatureFlags.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY;
import static org.opensearch.index.codec.fuzzy.FuzzySetParameters.DEFAULT_FALSE_POSITIVE_PROBABILITY;
import static org.opensearch.index.compositeindex.CompositeIndexSettings.COMPOSITE_INDEX_MAX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING;
import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING;
import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING;
import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING;
@@ -378,6 +382,37 @@ public static IndexMergePolicy fromString(String text) {
*/
new ByteSizeValue(Translog.DEFAULT_HEADER_SIZE_IN_BYTES + 1, ByteSizeUnit.BYTES),
new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES),
new Setting.Validator<ByteSizeValue>() {
@Override
public void validate(final ByteSizeValue value) {}

@Override
public void validate(ByteSizeValue value, Map<Setting<?>, Object> settings) {
boolean isCompositeIndex = (boolean) settings.get(StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING);
ByteSizeValue compositeIndexMaxFlushThreshold = (ByteSizeValue) settings.get(
COMPOSITE_INDEX_MAX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING
);
if (isCompositeIndex && value.compareTo(compositeIndexMaxFlushThreshold) > 0) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"You can configure '%s' with upto '%s' for composite index",
INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(),
compositeIndexMaxFlushThreshold
)
);
}
}

@Override
public Iterator<Setting<?>> settings() {
final List<Setting<?>> settings = Arrays.asList(
StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING,
COMPOSITE_INDEX_MAX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING
);
return settings.iterator();
}
},
Property.Dynamic,
Property.IndexScope
);
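
For context, a minimal sketch (not something this commit adds) of the settings combination the validator above is meant to reject: a composite index whose translog flush threshold exceeds the node-level ceiling (512mb by default, defined in CompositeIndexSettings below).

import org.opensearch.common.settings.Settings;

public class CompositeFlushThresholdExample {
    public static void main(String[] args) {
        // Index settings as a user might supply them at index creation time.
        Settings indexSettings = Settings.builder()
            .put("index.composite_index", true)
            .put("index.translog.flush_threshold_size", "1gb") // above the 512mb default ceiling
            .build();
        // When these settings go through index-scoped settings validation, the validator above
        // is expected to reject the combination with an IllegalArgumentException.
        System.out.println(indexSettings);
    }
}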
@@ -864,6 +899,8 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) {
*/
private volatile double docIdFuzzySetFalsePositiveProbability;

private final boolean isCompositeIndex;

/**
* Returns the default search fields for this index.
*/
@@ -1027,7 +1064,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti

setEnableFuzzySetForDocId(scopedSettings.get(INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING));
setDocIdFuzzySetFalsePositiveProbability(scopedSettings.get(INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING));

isCompositeIndex = scopedSettings.get(StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING);
scopedSettings.addSettingsUpdateConsumer(
TieredMergePolicyProvider.INDEX_COMPOUND_FORMAT_SETTING,
tieredMergePolicyProvider::setNoCFSRatio
@@ -1272,6 +1309,10 @@ public int getNumberOfReplicas() {
return settings.getAsInt(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, null);
}

public boolean isCompositeIndex() {
return isCompositeIndex;
}

/**
* Returns true if segment replication is enabled on the index.
*
@@ -13,6 +13,8 @@
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.util.FeatureFlags;
import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.core.common.unit.ByteSizeValue;

/**
* Cluster level settings for composite indices
@@ -37,12 +39,22 @@ public class CompositeIndexSettings {
Setting.Property.Dynamic
);

/**
* This sets the max flush threshold size for composite index
*/
public static final Setting<ByteSizeValue> COMPOSITE_INDEX_MAX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING = Setting.byteSizeSetting(
"indices.composite_index.translog.max_flush_threshold_size",
new ByteSizeValue(512, ByteSizeUnit.MB),
new ByteSizeValue(128, ByteSizeUnit.MB),
new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES),
Setting.Property.NodeScope
);

private volatile boolean starTreeIndexCreationEnabled;

public CompositeIndexSettings(Settings settings, ClusterSettings clusterSettings) {
this.starTreeIndexCreationEnabled = STAR_TREE_INDEX_ENABLED_SETTING.get(settings);
clusterSettings.addSettingsUpdateConsumer(STAR_TREE_INDEX_ENABLED_SETTING, this::starTreeIndexCreationEnabled);

}

private void starTreeIndexCreationEnabled(boolean value) {
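
To round out the picture, a small sketch (not from this commit) of how the node-level ceiling defined above is read. Since the setting is NodeScope, the value comes from node settings such as opensearch.yml; the 1gb used here is purely illustrative.

import org.opensearch.common.settings.Settings;
import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.index.compositeindex.CompositeIndexSettings;

public class CompositeCeilingExample {
    public static void main(String[] args) {
        // Node settings as they might look after parsing opensearch.yml.
        Settings nodeSettings = Settings.builder()
            .put("indices.composite_index.translog.max_flush_threshold_size", "1gb")
            .build();
        ByteSizeValue ceiling =
            CompositeIndexSettings.COMPOSITE_INDEX_MAX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.get(nodeSettings);
        System.out.println(ceiling); // prints the configured ceiling; the default is 512mb if unset
    }
}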
@@ -26,6 +26,7 @@
public class StarTreeIndexSettings {

public static int STAR_TREE_MAX_DIMENSIONS_DEFAULT = 10;
public static int STAR_TREE_MAX_BASE_METRICS_DEFAULT = 100;
/**
* This setting determines the max number of star tree fields that can be part of composite index mapping. For each
* star tree field, we will generate associated star tree index.
@@ -52,6 +53,19 @@
Setting.Property.Final
);

/**
* This setting determines the max number of base metrics that can be part of a star tree index field. The number of
* base metrics has a direct effect on star tree index size and query performance.
*/
public static final Setting<Integer> STAR_TREE_MAX_BASE_METRICS_SETTING = Setting.intSetting(
"index.composite_index.star_tree.field.max_base_metrics",
STAR_TREE_MAX_BASE_METRICS_DEFAULT,
4,
100,
Setting.Property.IndexScope,
Setting.Property.Final
);

/**
* This setting determines the max number of date intervals that can be part of star tree date field.
*/
@@ -108,4 +122,10 @@ public static Rounding.DateTimeUnit getTimeUnit(String expression) {
}
return DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(expression);
}

public static final Setting<Boolean> IS_COMPOSITE_INDEX_SETTING = Setting.boolSetting(
"index.composite_index",
false,
Setting.Property.IndexScope
);
}
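
Putting the new index-level flag to use, a hedged sketch (the index name is hypothetical, and this code is not part of the commit) of creating an index with index.composite_index enabled, which the mapper-level validation further down requires before star tree fields are accepted.

import org.opensearch.action.admin.indices.create.CreateIndexRequest;
import org.opensearch.common.settings.Settings;

public class CreateCompositeIndexExample {
    public static void main(String[] args) {
        // "index.composite_index" must be true for star tree mappings to be accepted
        // (see the parseCompositeField check in this diff).
        CreateIndexRequest request = new CreateIndexRequest("logs-composite")
            .settings(Settings.builder().put("index.composite_index", true).build());
        System.out.println(request.settings());
    }
}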
@@ -661,6 +661,17 @@ private static void parseNonDynamicArray(ParseContext context, ObjectMapper mapp
throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token;
// block array values for composite index fields
if (context.indexSettings().isCompositeIndex() && context.mapperService().isCompositeField(arrayFieldName)) {
throw new MapperParsingException(
String.format(
Locale.ROOT,
"object mapping for [%s] with array for [%s] cannot be accepted as field is also part of composite index mapping which does not accept arrays",
mapper.name(),
arrayFieldName
)
);
}
final String[] paths = splitAndValidatePath(lastFieldName);
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {
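
To make the new array restriction concrete, a hedged example (field names are hypothetical) of the kind of document that is now rejected when the target index is a composite index and the field participates in a star tree mapping.

public class CompositeArrayRejectionExample {
    public static void main(String[] args) {
        // Assuming "status" is part of a star tree field on a composite index:
        String accepted = "{ \"status\": 200, \"size\": 1024 }";        // scalar value: parsed as before
        String rejected = "{ \"status\": [200, 404], \"size\": 1024 }"; // array value: MapperParsingException
        System.out.println(accepted);
        System.out.println(rejected);
    }
}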
@@ -227,6 +227,7 @@ public enum MergeReason {
private final BooleanSupplier idFieldDataEnabled;

private volatile Set<CompositeMappedFieldType> compositeMappedFieldTypes;
private volatile Set<String> fieldsPartOfCompositeMappings;

public MapperService(
IndexSettings indexSettings,
@@ -547,9 +548,18 @@ private synchronized Map<String, DocumentMapper> internalMerge(DocumentMapper ma

// initialize composite fields post merge
this.compositeMappedFieldTypes = getCompositeFieldTypesFromMapper();
buildCompositeFieldLookup();
return results;
}

private void buildCompositeFieldLookup() {
Set<String> fieldsPartOfCompositeMappings = new HashSet<>();
for (CompositeMappedFieldType fieldType : compositeMappedFieldTypes) {
fieldsPartOfCompositeMappings.addAll(fieldType.fields());
}
this.fieldsPartOfCompositeMappings = fieldsPartOfCompositeMappings;
}

private boolean assertSerialization(DocumentMapper mapper) {
// capture the source now, it may change due to concurrent parsing
final CompressedXContent mappingSource = mapper.mappingSource();
@@ -676,6 +686,10 @@ private Set<CompositeMappedFieldType> getCompositeFieldTypesFromMapper() {
return compositeMappedFieldTypes;
}

public boolean isCompositeField(String field) {
return fieldsPartOfCompositeMappings.contains(field);
}

public ObjectMapper getObjectMapper(String name) {
return this.mapper == null ? null : this.mapper.objectMappers().get(name);
}
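
For completeness, a brief sketch (not part of this commit; the field name is hypothetical) of how the new MapperService#isCompositeField lookup can be consumed by callers such as DocumentParser above.

import org.opensearch.index.mapper.MapperService;

public class CompositeFieldLookupExample {
    // Returns true when the given field participates in any composite (star tree) field,
    // which is the check DocumentParser combines with IndexSettings#isCompositeIndex.
    static boolean isPartOfStarTree(MapperService mapperService, String field) {
        return mapperService.isCompositeField(field);
    }
}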
@@ -440,6 +440,15 @@ protected static void parseCompositeField(
+ " feature flag in the JVM options"
);
}
if (StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING.get(parserContext.getSettings()) == false) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Set '%s' as true as part of index settings to use star tree index",
StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING.getKey()
)
);
}
Iterator<Map.Entry<String, Object>> iterator = compositeNode.entrySet().iterator();
if (compositeNode.size() > StarTreeIndexSettings.STAR_TREE_MAX_FIELDS_SETTING.get(parserContext.getSettings())) {
throw new IllegalArgumentException(
@@ -22,6 +22,7 @@
import org.opensearch.search.lookup.SearchLookup;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
@@ -155,8 +156,20 @@ private List<Dimension> buildDimensions(String fieldName, Map<String, Object> ma
String.format(Locale.ROOT, "Atleast two dimensions are required to build star tree index field [%s]", fieldName)
);
}
Set<String> dimensionFieldNames = new HashSet<>();
for (Object dim : dimList) {
dimensions.add(getDimension(fieldName, dim, context));
Dimension dimension = getDimension(fieldName, dim, context);
if (dimensionFieldNames.add(dimension.getField()) == false) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Duplicate dimension [%s] present as part star tree index field [%s]",
dimension.getField(),
fieldName
)
);
}
dimensions.add(dimension);
}
} else {
throw new MapperParsingException(
@@ -223,6 +236,7 @@ private List<Metric> buildMetrics(String fieldName, Map<String, Object> map, Map
}
if (metricsFromInput instanceof List<?>) {
List<?> metricsList = (List<?>) metricsFromInput;
Set<String> metricFieldNames = new HashSet<>();
for (Object metric : metricsList) {
Map<String, Object> metricMap = (Map<String, Object>) metric;
String name = (String) XContentMapValues.extractValue(CompositeDataCubeFieldType.NAME, metricMap);
@@ -232,7 +246,18 @@
}
metricMap.remove(CompositeDataCubeFieldType.NAME);
if (objbuilder == null || objbuilder.mappersBuilders == null) {
metrics.add(getMetric(name, metricMap, context));
Metric metricFromParser = getMetric(name, metricMap, context);
if (metricFieldNames.add(metricFromParser.getField()) == false) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Duplicate metrics [%s] present as part star tree index field [%s]",
metricFromParser.getField(),
fieldName
)
);
}
metrics.add(metricFromParser);
} else {
Optional<Mapper.Builder> meticBuilder = findMapperBuilderByName(name, this.objbuilder.mappersBuilders);
if (meticBuilder.isEmpty()) {
@@ -243,7 +268,18 @@
String.format(Locale.ROOT, "non-numeric field type is associated with star tree metric [%s]", this.name)
);
}
metrics.add(getMetric(name, metricMap, context));
Metric metricFromParser = getMetric(name, metricMap, context);
if (metricFieldNames.add(metricFromParser.getField()) == false) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Duplicate metrics [%s] present as part star tree index field [%s]",
metricFromParser.getField(),
fieldName
)
);
}
metrics.add(metricFromParser);
DocumentMapperParser.checkNoRemainingFields(
metricMap,
context.indexVersionCreated(),
@@ -254,6 +290,32 @@
} else {
throw new MapperParsingException(String.format(Locale.ROOT, "unable to parse metrics for star tree field [%s]", this.name));
}
int numBaseMetrics = 0;
for (Metric metric : metrics) {
for (MetricStat metricStat : metric.getMetrics()) {
if (metricStat.isDerivedMetric() == false) {
numBaseMetrics++;
}
}
}
if (numBaseMetrics > context.getSettings()
.getAsInt(
StarTreeIndexSettings.STAR_TREE_MAX_BASE_METRICS_SETTING.getKey(),
StarTreeIndexSettings.STAR_TREE_MAX_BASE_METRICS_DEFAULT
)) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"There cannot be more than [%s] base metrics for star tree field [%s]",
context.getSettings()
.getAsInt(
StarTreeIndexSettings.STAR_TREE_MAX_BASE_METRICS_SETTING.getKey(),
StarTreeIndexSettings.STAR_TREE_MAX_BASE_METRICS_DEFAULT
),
fieldName
)
);
}
Metric docCountMetric = new Metric(DocCountFieldMapper.NAME, List.of(MetricStat.DOC_COUNT));
metrics.add(docCountMetric);
return metrics;