Usage example of com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata in the LinkedIn Pinot project:
method testInvertedIndexConfig of class IndexLoadingConfigMetadataTest.
/**
 * Verifies parsing of the inverted-index column set from the test resource metadata:
 * columns col0..col9 must be reported as loading an inverted index, and
 * col10..col19 must not.
 */
@Test
public void testInvertedIndexConfig() {
  final int expectedColumnCount = 10;
  Configuration resourceMetadata = getTestResourceMetadata();
  IndexLoadingConfigMetadata indexLoadingConfigMetadata = new IndexLoadingConfigMetadata(resourceMetadata);
  Set<String> loadingInvertedIndexColumns = indexLoadingConfigMetadata.getLoadingInvertedIndexColumns();
  // TestNG's Assert.assertEquals signature is (actual, expected); the arguments were
  // previously reversed, which produces misleading failure messages.
  Assert.assertEquals(loadingInvertedIndexColumns.size(), expectedColumnCount);
  // col0..col9 are configured for inverted-index loading.
  for (int j = 0; j < expectedColumnCount; ++j) {
    String columnName = "col" + j;
    Assert.assertTrue(indexLoadingConfigMetadata.isLoadingInvertedIndexForColumn(columnName));
  }
  // col10..col19 are not configured and must be excluded.
  for (int j = expectedColumnCount; j < 2 * expectedColumnCount; ++j) {
    String columnName = "col" + j;
    Assert.assertFalse(indexLoadingConfigMetadata.isLoadingInvertedIndexForColumn(columnName));
  }
}
Usage example of com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata in the LinkedIn Pinot project:
method setUp of class SegmentPreProcessorTest.
/**
 * One-time test setup: configures the inverted-index loading metadata and loads the
 * base schema plus three "new columns" schema variants from classpath resources.
 *
 * @throws Exception if a schema resource is missing or cannot be parsed
 */
@BeforeClass
public void setUp() throws Exception {
  // For indexLoadingConfigMetadata, we specify two columns without inverted index ('column1', 'column13'), one
  // non-existing column ('noSuchColumn') and one column with existed inverted index ('column7').
  _indexLoadingConfigMetadata = new IndexLoadingConfigMetadata(new PropertiesConfiguration());
  _indexLoadingConfigMetadata.initLoadingInvertedIndexColumnSet(
      new String[] { COLUMN1_NAME, COLUMN7_NAME, COLUMN13_NAME, NO_SUCH_COLUMN_NAME });
  _indexLoadingConfigMetadata.setEnableDefaultColumns(true);
  // For newColumnsSchema, we add 4 different data type metric columns with one user-defined default null value, and
  // 3 different data type dimension columns with one user-defined default null value and one multi-value column.
  _schema = loadSchema(SCHEMA);
  _newColumnsSchema1 = loadSchema(NEW_COLUMNS_SCHEMA1);
  _newColumnsSchema2 = loadSchema(NEW_COLUMNS_SCHEMA2);
  _newColumnsSchema3 = loadSchema(NEW_COLUMNS_SCHEMA3);
}

/**
 * Loads a Pinot {@link Schema} from a classpath resource, failing fast with a
 * NullPointerException if the resource cannot be found.
 *
 * @param resourceName classpath-relative name of the schema file
 * @return the parsed schema
 * @throws Exception if the schema file cannot be read or parsed
 */
private Schema loadSchema(String resourceName) throws Exception {
  URL resourceUrl = getClass().getClassLoader().getResource(resourceName);
  Preconditions.checkNotNull(resourceUrl);
  return Schema.fromFile(new File(resourceUrl.getFile()));
}
Usage example of com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata in the LinkedIn Pinot project:
method setUp of class SegmentV1V2ToV3FormatConverterTest.
/**
 * Per-test setup: builds a v1/v2 test segment from Avro data into a fresh temp
 * directory, plants a fake star-tree index file, and prepares loading configs
 * pinned to segment format versions v1 and v3.
 *
 * @throws Exception if segment generation or file I/O fails
 */
@BeforeMethod
public void setUp() throws Exception {
  INDEX_DIR = Files.createTempDirectory(SegmentV1V2ToV3FormatConverter.class.getName() + "_segmentDir").toFile();
  final String filePath =
      TestUtils.getFileFromResourceUrl(SegmentV1V2ToV3FormatConverter.class.getClassLoader().getResource(AVRO_DATA));
  // intentionally changed this to TimeUnit.Hours to make it non-default for testing
  final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
      new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.HOURS, "testTable");
  config.setSegmentNamePostfix("1");
  config.setTimeColumnName("daysSinceEpoch");
  final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
  driver.init(config);
  driver.build();
  segmentDirectory = new File(INDEX_DIR, driver.getSegmentName());
  File starTreeFile = new File(segmentDirectory, V1Constants.STAR_TREE_INDEX_FILE);
  // writeStringToFile creates the file itself, so the previous FileUtils.touch() call
  // was redundant and has been removed.
  FileUtils.writeStringToFile(starTreeFile, "This is a star tree index");
  // Use two independent Configuration instances: reusing one instance with clear()
  // would silently mutate v1LoadingConfig if IndexLoadingConfigMetadata retains the
  // Configuration reference instead of copying its values.
  Configuration v1TableConfig = new PropertiesConfiguration();
  v1TableConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_SEGMENT_FORMAT_VERSION, "v1");
  v1LoadingConfig = new IndexLoadingConfigMetadata(v1TableConfig);
  Configuration v3TableConfig = new PropertiesConfiguration();
  v3TableConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_SEGMENT_FORMAT_VERSION, "v3");
  v3LoadingConfig = new IndexLoadingConfigMetadata(v3TableConfig);
}
Usage example of com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata in the LinkedIn Pinot project:
method setUp of class LoadersTest.
/**
 * Per-test setup: builds a test segment from Avro data into a fresh temp directory
 * and prepares loading configs pinned to segment format versions v1 and v3.
 *
 * @throws Exception if segment generation fails
 */
@BeforeMethod
public void setUp() throws Exception {
  INDEX_DIR = Files.createTempDirectory(LoadersTest.class.getName() + "_segmentDir").toFile();
  final String filePath =
      TestUtils.getFileFromResourceUrl(Loaders.class.getClassLoader().getResource(AVRO_DATA));
  final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
      new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.HOURS, "testTable");
  config.setSegmentNamePostfix("1");
  config.setTimeColumnName("daysSinceEpoch");
  final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
  driver.init(config);
  driver.build();
  segmentDirectory = new File(INDEX_DIR, driver.getSegmentName());
  // Use two independent Configuration instances: reusing one instance with clear()
  // would silently mutate v1LoadingConfig if IndexLoadingConfigMetadata retains the
  // Configuration reference instead of copying its values.
  Configuration v1TableConfig = new PropertiesConfiguration();
  v1TableConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_SEGMENT_FORMAT_VERSION, "v1");
  v1LoadingConfig = new IndexLoadingConfigMetadata(v1TableConfig);
  Configuration v3TableConfig = new PropertiesConfiguration();
  v3TableConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_SEGMENT_FORMAT_VERSION, "v3");
  v3LoadingConfig = new IndexLoadingConfigMetadata(v3TableConfig);
}
Usage example of com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata in the LinkedIn Pinot project:
method testBitMapInvertedIndex of class BitmapInvertedIndexTest.
/**
 * Cross-checks every loaded bitmap inverted index against the source Avro records:
 * for each record and column, the docId must appear in the bitmap of the record's
 * dictionary id, and in no other dictionary id's bitmap.
 *
 * @param readMode the read mode (heap/mmap) used to load the segment
 * @throws Exception if the segment or the Avro data cannot be read
 */
void testBitMapInvertedIndex(ReadMode readMode) throws Exception {
  IndexLoadingConfigMetadata indexLoadingConfig = new IndexLoadingConfigMetadata(new PropertiesConfiguration());
  indexLoadingConfig.initLoadingInvertedIndexColumnSet(invertedIndexColumns);
  final IndexSegmentImpl mmapSegment =
      (IndexSegmentImpl) ColumnarSegmentLoader.load(segmentDirectory, readMode, indexLoadingConfig);
  // compare the loaded inverted index with the record in avro file.
  // DataFileStream is Closeable; try-with-resources fixes the previous leak where the
  // stream (and underlying FileInputStream) was never closed.
  try (DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>(
      new FileInputStream(new File(getClass().getClassLoader().getResource(AVRO_DATA).getFile())),
      new GenericDatumReader<GenericRecord>())) {
    int docId = 0;
    while (reader.hasNext()) {
      final GenericRecord rec = reader.next();
      for (final String column
          : ((SegmentMetadataImpl) mmapSegment.getSegmentMetadata()).getColumnMetadataMap().keySet()) {
        Object entry = rec.get(column);
        if (entry instanceof Utf8) {
          // Avro strings come back as Utf8; normalize for dictionary lookup.
          entry = ((Utf8) entry).toString();
        }
        // Hoist the per-column lookups out of the inner loop over dictionary ids.
        final int dicId = mmapSegment.getDictionaryFor(column).indexOf(entry);
        final int size = mmapSegment.getDictionaryFor(column).length();
        // make sure that docId for dicId exist in the inverted index
        Assert.assertTrue(mmapSegment.getInvertedIndexFor(column).getImmutable(dicId).contains(docId));
        for (int i = 0; i < size; ++i) {
          // remove this for-loop for quick test
          if (i == dicId) {
            continue;
          }
          // make sure that docId for dicId does not exist in the inverted index
          Assert.assertFalse(mmapSegment.getInvertedIndexFor(column).getImmutable(i).contains(docId));
        }
      }
      ++docId;
    }
  }
}
Aggregations