Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ public class OfflineClusterIntegrationTest extends BaseClusterIntegrationTestSet
private static final String MAX_NUM_MULTI_VALUES_MAP_KEY = "maxNumMultiValuesMap";
// TODO: This might lead to flaky test, as this disk size is not deterministic
// as it depends on the iteration order of a HashSet.
private static final int DISK_SIZE_IN_BYTES = 20796000;
private static final int DISK_SIZE_IN_BYTES = 20797128;
private static final int NUM_ROWS = 115545;

private final List<ServiceStatus.ServiceStatusCallback> _serviceStatusCallbacks =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -856,9 +856,13 @@ public static void addColumnMinMaxValueInfo(PropertiesConfiguration properties,
String maxValue) {
if (isValidPropertyValue(minValue)) {
properties.setProperty(getKeyFor(column, MIN_VALUE), minValue);
} else {
properties.setProperty(getKeyFor(column, MIN_MAX_VALUE_INVALID), true);
}
if (isValidPropertyValue(maxValue)) {
properties.setProperty(getKeyFor(column, MAX_VALUE), maxValue);
} else {
properties.setProperty(getKeyFor(column, MIN_MAX_VALUE_INVALID), true);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ private Set<String> getColumnsToAddMinMaxValue() {
private boolean needAddColumnMinMaxValueForColumn(String columnName) {
ColumnMetadata columnMetadata = _segmentMetadata.getColumnMetadataFor(columnName);
return columnMetadata.hasDictionary() && columnMetadata.getMinValue() == null
&& columnMetadata.getMaxValue() == null;
&& columnMetadata.getMaxValue() == null && !columnMetadata.isMinMaxValueInvalid();
}

private void addColumnMinMaxValueForColumn(String columnName)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
import org.apache.pinot.segment.local.segment.readers.GenericRowRecordReader;
import org.apache.pinot.segment.spi.IndexSegment;
import org.apache.pinot.segment.spi.SegmentMetadata;
import org.apache.pinot.segment.spi.V1Constants;
import org.apache.pinot.segment.spi.V1Constants.MetadataKeys.Column;
import org.apache.pinot.segment.spi.creator.SegmentGeneratorConfig;
import org.apache.pinot.spi.config.table.TableConfig;
import org.apache.pinot.spi.config.table.TableType;
Expand All @@ -43,14 +43,12 @@
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;


public class SegmentColumnarIndexCreatorTest {
private static final File TEMP_DIR = new File(FileUtils.getTempDirectory(), "SegmentColumnarIndexCreatorTest");
private static final File CONFIG_FILE = new File(TEMP_DIR, "config");
private static final String PROPERTY_KEY = "testKey";
private static final String COLUMN_NAME = "testColumn";
private static final String COLUMN_PROPERTY_KEY_PREFIX =
V1Constants.MetadataKeys.Column.COLUMN_PROPS_KEY_PREFIX + COLUMN_NAME + ".";
private static final String COLUMN_PROPERTY_KEY_PREFIX = Column.COLUMN_PROPS_KEY_PREFIX + COLUMN_NAME + ".";
private static final int NUM_ROUNDS = 1000;

@BeforeClass
Expand Down Expand Up @@ -175,6 +173,24 @@ private static long getStartTimeInSegmentMetadata(String testDateTimeFormat, Str
}
}

/**
 * Verifies that {@code addColumnMinMaxValueInfo} sets the {@code minMaxValueInvalid} flag only
 * when a min or max value cannot be stored as a metadata property value.
 */
@Test
public void testAddMinMaxValueInvalid() {
// Both values storable: the invalid flag must not be set.
Assert.assertFalse(isMinMaxInvalidAfterAdd("bar", "foo"));
// Min value contains ',' (not a valid property value): the flag must be set.
Assert.assertTrue(isMinMaxInvalidAfterAdd(",bar", "foo"));
// Whitespace-only max value is not a valid property value: the flag must be set.
Assert.assertTrue(isMinMaxInvalidAfterAdd("bar", " "));
}

/** Adds the given min/max values for a column and returns the resulting invalid-flag value. */
private boolean isMinMaxInvalidAfterAdd(String minValue, String maxValue) {
PropertiesConfiguration props = new PropertiesConfiguration();
SegmentColumnarIndexCreator.addColumnMinMaxValueInfo(props, "colA", minValue, maxValue);
return Boolean.parseBoolean(
String.valueOf(props.getProperty(Column.getKeyFor("colA", Column.MIN_MAX_VALUE_INVALID))));
}

@AfterClass
public void tearDown()
throws IOException {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.segment.local.segment.index.creator;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.pinot.segment.local.segment.creator.impl.SegmentIndexCreationDriverImpl;
import org.apache.pinot.segment.local.segment.readers.GenericRowRecordReader;
import org.apache.pinot.segment.spi.creator.SegmentGeneratorConfig;
import org.apache.pinot.segment.spi.index.metadata.SegmentMetadataImpl;
import org.apache.pinot.spi.config.table.TableConfig;
import org.apache.pinot.spi.config.table.TableType;
import org.apache.pinot.spi.data.FieldSpec;
import org.apache.pinot.spi.data.Schema;
import org.apache.pinot.spi.data.readers.GenericRow;
import org.apache.pinot.spi.utils.builder.TableConfigBuilder;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;


/**
 * Tests that segment generation correctly records the {@code minMaxValueInvalid} flag in segment
 * metadata: the flag must be unset (and min/max values stored) for storable values, and set (with
 * min/max values omitted) for values that cannot be stored, e.g. strings containing ','.
 */
public class SegmentGenerationWithMinMaxInvalidTest {
  private static final String STRING_COLUMN = "col1";
  // Values containing ',' cannot be stored as min/max in the properties-based metadata file.
  private static final String[] STRING_VALUES_INVALID = {"A,", "B,", "C,", "D,", "E"};
  private static final String[] STRING_VALUES_VALID = {"A", "B", "C", "D", "E"};
  private static final String LONG_COLUMN = "col2";
  private static final long[] LONG_VALUES =
      {1588316400000L, 1588489200000L, 1588662000000L, 1588834800000L, 1589007600000L};

  private static final String SEGMENT_DIR_NAME =
      FileUtils.getTempDirectoryPath() + File.separator + "segmentMinMaxInvalidTest";
  private static final String SEGMENT_NAME = "testSegmentMinMaxInvalid";

  private Schema _schema;
  private TableConfig _tableConfig;

  @BeforeClass
  public void setup() {
    _tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName("testTable").build();
    _schema = new Schema.SchemaBuilder().addSingleValueDimension(STRING_COLUMN, FieldSpec.DataType.STRING)
        .addMetric(LONG_COLUMN, FieldSpec.DataType.LONG).build();
  }

  @Test
  public void testMinMaxInvalidFlagInMetadata()
      throws Exception {
    // Storable values: min/max are persisted and the invalid flag is not set.
    FileUtils.deleteQuietly(new File(SEGMENT_DIR_NAME));
    File segmentDir = buildSegment(_tableConfig, _schema, STRING_VALUES_VALID);
    SegmentMetadataImpl metadata = new SegmentMetadataImpl(segmentDir);
    Assert.assertEquals(metadata.getTotalDocs(), STRING_VALUES_VALID.length);
    Assert.assertFalse(metadata.getColumnMetadataFor(STRING_COLUMN).isMinMaxValueInvalid());
    Assert.assertEquals(metadata.getColumnMetadataFor(STRING_COLUMN).getMinValue(), "A");
    Assert.assertEquals(metadata.getColumnMetadataFor(STRING_COLUMN).getMaxValue(), "E");

    // Un-storable values (contain ','): the flag is set and no min/max values are persisted.
    FileUtils.deleteQuietly(new File(SEGMENT_DIR_NAME));
    segmentDir = buildSegment(_tableConfig, _schema, STRING_VALUES_INVALID);
    metadata = new SegmentMetadataImpl(segmentDir);
    Assert.assertEquals(metadata.getTotalDocs(), STRING_VALUES_INVALID.length);
    Assert.assertTrue(metadata.getColumnMetadataFor(STRING_COLUMN).isMinMaxValueInvalid());
    Assert.assertNull(metadata.getColumnMetadataFor(STRING_COLUMN).getMinValue());
    Assert.assertNull(metadata.getColumnMetadataFor(STRING_COLUMN).getMaxValue());
  }

  /**
   * Builds a segment with one string column and one long column populated from the given values
   * (the long column always uses {@link #LONG_VALUES}) and returns the segment directory.
   */
  private File buildSegment(final TableConfig tableConfig, final Schema schema, String[] stringValues)
      throws Exception {
    SegmentGeneratorConfig config = new SegmentGeneratorConfig(tableConfig, schema);
    config.setOutDir(SEGMENT_DIR_NAME);
    config.setSegmentName(SEGMENT_NAME);

    // Pre-size to the actual row count (the original hard-coded capacity 3 while adding 5 rows).
    List<GenericRow> rows = new ArrayList<>(stringValues.length);
    for (int i = 0; i < stringValues.length; i++) {
      GenericRow row = new GenericRow();
      row.putValue(STRING_COLUMN, stringValues[i]);
      row.putValue(LONG_COLUMN, LONG_VALUES[i]);
      rows.add(row);
    }

    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    driver.init(config, new GenericRowRecordReader(rows));
    driver.build();
    // Best-effort cleanup of the generated segment when the JVM exits.
    driver.getOutputDirectory().deleteOnExit();
    return driver.getOutputDirectory();
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,20 +23,24 @@
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.io.FileUtils;
import org.apache.pinot.segment.local.segment.creator.SegmentTestUtils;
import org.apache.pinot.segment.local.segment.creator.impl.SegmentCreationDriverFactory;
import org.apache.pinot.segment.local.segment.creator.impl.SegmentIndexCreationDriverImpl;
import org.apache.pinot.segment.local.segment.index.converter.SegmentV1V2ToV3FormatConverter;
import org.apache.pinot.segment.local.segment.index.loader.columnminmaxvalue.ColumnMinMaxValueGeneratorMode;
import org.apache.pinot.segment.local.segment.readers.GenericRowRecordReader;
import org.apache.pinot.segment.spi.ColumnMetadata;
import org.apache.pinot.segment.spi.V1Constants;
import org.apache.pinot.segment.spi.creator.SegmentGeneratorConfig;
Expand All @@ -59,6 +63,7 @@
import org.apache.pinot.spi.data.FieldSpec;
import org.apache.pinot.spi.data.FieldSpec.DataType;
import org.apache.pinot.spi.data.Schema;
import org.apache.pinot.spi.data.readers.GenericRow;
import org.apache.pinot.spi.env.PinotConfiguration;
import org.apache.pinot.spi.utils.ByteArray;
import org.apache.pinot.spi.utils.ReadMode;
Expand Down Expand Up @@ -1223,14 +1228,84 @@ private void testIfNeedProcess()
});
}

/**
 * Verifies that {@code SegmentPreProcessor.needProcess()} does not repeatedly flag segments whose
 * min/max values could not be stored (the {@code minMaxValueInvalid} flag is set in metadata),
 * while still flagging segments whose min/max values are genuinely missing.
 */
@Test
public void testNeedAddMinMaxValue()
throws Exception {

// Invalid values contain ',' and cannot be stored as min/max in the metadata properties file.
String[] stringValuesInvalid = {"A,", "B,", "C,", "D,", "E"};
String[] stringValuesValid = {"A", "B", "C", "D", "E"};
long[] longValues = {1588316400000L, 1588489200000L, 1588662000000L, 1588834800000L, 1589007600000L};
TableConfig tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName("testTable").build();
Schema schema = new Schema.SchemaBuilder().addSingleValueDimension("stringCol", FieldSpec.DataType.STRING)
.addMetric("longCol", FieldSpec.DataType.LONG).build();

FileUtils.deleteQuietly(INDEX_DIR);

// build good segment, no needPreprocess
// Min/max values were stored at creation time, so even mode ALL has nothing to regenerate.
File segment = buildTestSegmentForMinMax(tableConfig, schema, "validSegment", stringValuesValid, longValues);
SegmentDirectory segmentDirectory = SegmentDirectoryLoaderRegistry.getDefaultSegmentDirectoryLoader()
.load(segment.toURI(),
new SegmentDirectoryLoaderContext.Builder().setSegmentDirectoryConfigs(_configuration).build());
IndexLoadingConfig indexLoadingConfig = new IndexLoadingConfig();
indexLoadingConfig.setColumnMinMaxValueGeneratorMode(ColumnMinMaxValueGeneratorMode.ALL);
SegmentPreProcessor processor = new SegmentPreProcessor(segmentDirectory, indexLoadingConfig, schema);
assertFalse(processor.needProcess());

// build bad segment, still no needPreprocess, since minMaxInvalid flag should be set
FileUtils.deleteQuietly(INDEX_DIR);
segment = buildTestSegmentForMinMax(tableConfig, schema, "invalidSegment", stringValuesInvalid, longValues);
segmentDirectory = SegmentDirectoryLoaderRegistry.getDefaultSegmentDirectoryLoader().load(segment.toURI(),
new SegmentDirectoryLoaderContext.Builder().setSegmentDirectoryConfigs(_configuration).build());
indexLoadingConfig = new IndexLoadingConfig();
indexLoadingConfig.setColumnMinMaxValueGeneratorMode(ColumnMinMaxValueGeneratorMode.NONE);
processor = new SegmentPreProcessor(segmentDirectory, indexLoadingConfig, schema);
assertFalse(processor.needProcess());

// Even in mode ALL, the invalid flag must suppress regeneration of min/max values.
indexLoadingConfig.setColumnMinMaxValueGeneratorMode(ColumnMinMaxValueGeneratorMode.ALL);
processor = new SegmentPreProcessor(segmentDirectory, indexLoadingConfig, schema);
assertFalse(processor.needProcess());

// modify metadata, to remove min/max, now needPreprocess
// With both the values and the invalid flag cleared, regeneration is required again.
removeMinMaxValuesFromMetadataFile(segment);
segmentDirectory = SegmentDirectoryLoaderRegistry.getDefaultSegmentDirectoryLoader().load(segment.toURI(),
new SegmentDirectoryLoaderContext.Builder().setSegmentDirectoryConfigs(_configuration).build());
processor = new SegmentPreProcessor(segmentDirectory, indexLoadingConfig, schema);
assertTrue(processor.needProcess());

FileUtils.deleteQuietly(INDEX_DIR);
}

/**
 * Builds a test segment named {@code segmentName} under {@code INDEX_DIR} with one string column
 * ("stringCol") and one long column ("longCol") populated from the given parallel value arrays,
 * and returns the generated segment directory.
 */
private File buildTestSegmentForMinMax(final TableConfig tableConfig, final Schema schema, String segmentName,
    String[] stringValues, long[] longValues)
    throws Exception {
  SegmentGeneratorConfig config = new SegmentGeneratorConfig(tableConfig, schema);
  config.setOutDir(INDEX_DIR.getAbsolutePath());
  config.setSegmentName(segmentName);

  // Pre-size to the actual row count (the original hard-coded capacity 3 while adding 5 rows)
  // and derive the loop bound from the input instead of a magic constant.
  List<GenericRow> rows = new ArrayList<>(stringValues.length);
  for (int i = 0; i < stringValues.length; i++) {
    GenericRow row = new GenericRow();
    row.putValue("stringCol", stringValues[i]);
    row.putValue("longCol", longValues[i]);
    rows.add(row);
  }

  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  driver.init(config, new GenericRowRecordReader(rows));
  driver.build();
  // Best-effort cleanup of the generated segment when the JVM exits.
  driver.getOutputDirectory().deleteOnExit();
  return driver.getOutputDirectory();
}

private static void removeMinMaxValuesFromMetadataFile(File indexDir)
throws Exception {
PropertiesConfiguration configuration = SegmentMetadataImpl.getPropertiesConfiguration(indexDir);
Iterator<String> keys = configuration.getKeys();
while (keys.hasNext()) {
String key = keys.next();
if (key.endsWith(V1Constants.MetadataKeys.Column.MIN_VALUE) || key.endsWith(
V1Constants.MetadataKeys.Column.MAX_VALUE)) {
V1Constants.MetadataKeys.Column.MAX_VALUE) || key.endsWith(
V1Constants.MetadataKeys.Column.MIN_MAX_VALUE_INVALID)) {
configuration.clearProperty(key);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,10 @@ default boolean isSingleValue() {

Comparable getMaxValue();

/**
 * Whether the min/max values for this column could not be stored in the segment metadata
 * (e.g. a value contains characters the metadata format cannot represent). Defaults to
 * {@code false} for implementations that do not track this flag.
 */
default boolean isMinMaxValueInvalid() {
return false;
}

@JsonProperty
boolean hasDictionary();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@ public static class Column {
public static final String DEFAULT_NULL_VALUE = "defaultNullValue";
public static final String MIN_VALUE = "minValue";
public static final String MAX_VALUE = "maxValue";
public static final String MIN_MAX_VALUE_INVALID = "minMaxValueInvalid";
public static final String PARTITION_FUNCTION = "partitionFunction";
public static final String PARTITION_FUNCTION_CONFIG = "partitionFunctionConfig";
public static final String NUM_PARTITIONS = "numPartitions";
Expand Down
Loading