Use of com.linkedin.pinot.core.common.DataSource in project pinot by linkedin.
Class ChunkIndexCreationDriverImplTest, method test3.
@Test
public void test3() throws Exception {
  final IndexSegmentImpl segment =
      (IndexSegmentImpl) Loaders.IndexSegment.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
  final DataSource ds = segment.getDataSource("column7");
  final Block bl = ds.nextBlock();
  final BlockValSet valSet = bl.getBlockValueSet();
  // Size the read buffer to the widest multi-value entry recorded in the column metadata.
  final int maxValue =
      ((SegmentMetadataImpl) segment.getSegmentMetadata()).getColumnMetadataFor("column7").getMaxNumberOfMultiValues();
  final BlockMultiValIterator it = (BlockMultiValIterator) valSet.iterator();
  while (it.hasNext()) {
    final int[] entry = new int[maxValue];
    it.nextIntVal(entry);
    LOGGER.trace(Arrays.toString(entry));
  }
}
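In this Pinot version, BlockMultiValIterator.nextIntVal(int[]) is expected to report how many values it copied into the buffer; assuming that return value, a variant of the loop above (a minimal sketch, not part of the original test) logs only the populated portion of each entry instead of the zero-padded buffer:

  while (it.hasNext()) {
    final int[] buffer = new int[maxValue];
    // Assumption: nextIntVal(int[]) returns the number of values written for this row.
    final int length = it.nextIntVal(buffer);
    LOGGER.trace(Arrays.toString(Arrays.copyOf(buffer, length)));
  }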
Use of com.linkedin.pinot.core.common.DataSource in project pinot by linkedin.
Class ChunkIndexCreationDriverImplTest, method test5.
@Test(enabled = false)
public void test5() throws Exception {
  final IndexSegmentImpl segment =
      (IndexSegmentImpl) Loaders.IndexSegment.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
  // Filter column1 with an equality predicate and walk the matching doc ids.
  final List<String> rhs = new ArrayList<String>();
  rhs.add("-100");
  final Predicate p = new EqPredicate("column1", rhs);
  final DataSource ds = segment.getDataSource("column1", p);
  final Block bl = ds.nextBlock();
  final BlockDocIdSet idSet = bl.getBlockDocIdSet();
  final BlockDocIdIterator it = idSet.iterator();
  int docId = it.next();
  final StringBuilder b = new StringBuilder();
  while (docId != Constants.EOF) {
    b.append(docId).append(',');
    docId = it.next();
  }
  // System.out.println(b.toString());
}
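Replacing the StringBuilder loop above with a counter makes the intent clearer when the output is never inspected; a minimal sketch using only the calls already shown (the literal "-100" may legitimately match zero rows):

  final BlockDocIdIterator countIt = idSet.iterator();
  int matchCount = 0;
  for (int id = countIt.next(); id != Constants.EOF; id = countIt.next()) {
    matchCount++;
  }
  LOGGER.trace("column1 == -100 matched " + matchCount + " documents");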
Use of com.linkedin.pinot.core.common.DataSource in project pinot by linkedin.
Class ChunkIndexCreationDriverImplTest, method test2.
@Test
public void test2() throws Exception {
  final IndexSegmentImpl segment =
      (IndexSegmentImpl) Loaders.IndexSegment.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
  // System.out.println("Index dir:" + INDEX_DIR);
  final DataSource ds = segment.getDataSource("column1");
  final Block bl = ds.nextBlock();
  final BlockValSet valSet = bl.getBlockValueSet();
  final BlockSingleValIterator it = (BlockSingleValIterator) valSet.iterator();
  // TODO: FIXME - load segment with known data and verify that it exists
  while (it.hasNext()) {
    LOGGER.trace(Integer.toString(it.nextIntVal()));
  }
}
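The TODO in test2 calls for verification against known data. Short of that, a minimal self-check (a sketch, assuming SegmentMetadata exposes getTotalDocs() in this version) counts the values as they are traced, replacing the loop above, and compares the total against the segment's document count:

  int count = 0;
  while (it.hasNext()) {
    LOGGER.trace(Integer.toString(it.nextIntVal()));
    count++;
  }
  // Assumption: SegmentMetadata#getTotalDocs() returns the number of documents in the segment.
  Assert.assertEquals(count, segment.getSegmentMetadata().getTotalDocs());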
Use of com.linkedin.pinot.core.common.DataSource in project pinot by linkedin.
Class ChunkIndexCreationDriverImplTest, method test6.
@Test(enabled = false)
public void test6() throws Exception {
  final IndexSegmentImpl segment =
      (IndexSegmentImpl) Loaders.IndexSegment.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
  // Pick a value that is guaranteed to exist by reading it from the column's dictionary.
  final ImmutableDictionaryReader d = segment.getDictionaryFor("column7");
  final List<String> rhs = new ArrayList<String>();
  rhs.add(d.get(new Random().nextInt(d.length())).toString());
  final Predicate p = new EqPredicate("column7", rhs);
  final DataSource ds = segment.getDataSource("column7", p);
  final Block bl = ds.nextBlock();
  final BlockDocIdSet idSet = bl.getBlockDocIdSet();
  final BlockDocIdIterator it = idSet.iterator();
  int docId = it.next();
  final StringBuilder b = new StringBuilder();
  while (docId != Constants.EOF) {
    b.append(docId).append(',');
    docId = it.next();
  }
  // System.out.println(b.toString());
}
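Because the predicate value is drawn from a random dictionary index, the disabled test6 is not reproducible from run to run. A deterministic variant (a hypothetical sketch using only the dictionary and predicate calls already shown) pins the value to dictionary id 0 and asserts that at least one document matches, since every dictionary entry originates from the column's data:

  // Hypothetical deterministic variant: always use the first dictionary entry.
  final List<String> fixedRhs = new ArrayList<String>();
  fixedRhs.add(d.get(0).toString());
  final DataSource fixedDs = segment.getDataSource("column7", new EqPredicate("column7", fixedRhs));
  final BlockDocIdIterator fixedIt = fixedDs.nextBlock().getBlockDocIdSet().iterator();
  Assert.assertTrue(fixedIt.next() != Constants.EOF, "a dictionary value should match at least one document");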
Use of com.linkedin.pinot.core.common.DataSource in project pinot by linkedin.
Class DefaultGroupKeyGeneratorTest, method setup.
@BeforeClass
private void setup() throws Exception {
  GenericRow[] segmentData = new GenericRow[NUM_ROWS];
  int value = _random.nextInt(MAX_STEP_LENGTH);
  // Generate random values for the segment.
  for (int i = 0; i < UNIQUE_ROWS; i++) {
    Map<String, Object> map = new HashMap<>();
    for (String singleValueColumn : SINGLE_VALUE_COLUMNS) {
      map.put(singleValueColumn, value);
      value += 1 + _random.nextInt(MAX_STEP_LENGTH);
    }
    for (String multiValueColumn : MULTI_VALUE_COLUMNS) {
      int numMultiValues = 1 + _random.nextInt(MAX_NUM_MULTI_VALUES);
      Integer[] values = new Integer[numMultiValues];
      for (int k = 0; k < numMultiValues; k++) {
        values[k] = value;
        value += 1 + _random.nextInt(MAX_STEP_LENGTH);
      }
      map.put(multiValueColumn, values);
    }
    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    segmentData[i] = genericRow;
  }
  // Repeat the unique rows to fill the rest of the segment.
  for (int i = UNIQUE_ROWS; i < NUM_ROWS; i += UNIQUE_ROWS) {
    System.arraycopy(segmentData, 0, segmentData, i, UNIQUE_ROWS);
  }
  // Create an index segment with the random values.
  Schema schema = new Schema();
  for (String singleValueColumn : SINGLE_VALUE_COLUMNS) {
    DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(singleValueColumn, FieldSpec.DataType.INT, true);
    schema.addField(dimensionFieldSpec);
  }
  for (String multiValueColumn : MULTI_VALUE_COLUMNS) {
    DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(multiValueColumn, FieldSpec.DataType.INT, false);
    schema.addField(dimensionFieldSpec);
  }
  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  FileUtils.deleteQuietly(new File(INDEX_DIR_PATH));
  config.setOutDir(INDEX_DIR_PATH);
  config.setSegmentName(SEGMENT_NAME);
  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  driver.init(config, new TestDataRecordReader(schema, segmentData));
  driver.build();
  IndexSegment indexSegment = Loaders.IndexSegment.load(new File(INDEX_DIR_PATH, SEGMENT_NAME), ReadMode.heap);
  // Get a data fetcher for the index segment.
  Map<String, BaseOperator> dataSourceMap = new HashMap<>();
  Map<String, Block> blockMap = new HashMap<>();
  for (String column : indexSegment.getColumnNames()) {
    DataSource dataSource = indexSegment.getDataSource(column);
    dataSourceMap.put(column, dataSource);
    blockMap.put(column, dataSource.getNextBlock());
  }
  // Generate a random test doc id set.
  int num1 = _random.nextInt(50);
  int num2 = num1 + 1 + _random.nextInt(50);
  for (int i = 0; i < 20; i += 2) {
    _testDocIdSet[i] = num1 + 50 * i;
    _testDocIdSet[i + 1] = num2 + 50 * i;
  }
  DataFetcher dataFetcher = new DataFetcher(dataSourceMap);
  DocIdSetBlock docIdSetBlock = new DocIdSetBlock(_testDocIdSet, _testDocIdSet.length);
  ProjectionBlock projectionBlock = new ProjectionBlock(blockMap, new DataBlockCache(dataFetcher), docIdSetBlock);
  _transformBlock = new TransformBlock(projectionBlock, new HashMap<String, BlockValSet>());
}
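Because the setup copies the first UNIQUE_ROWS rows across the rest of the segment, the raw row behind any doc id can be recovered with a modulo lookup. A hypothetical helper (not part of the original test) that assertions on the generated group keys could build on:

  // Hypothetical helper: the segment repeats segmentData[0..UNIQUE_ROWS-1],
  // so the source row for a given docId is its index modulo UNIQUE_ROWS.
  private static GenericRow expectedRow(GenericRow[] segmentData, int docId) {
    return segmentData[docId % UNIQUE_ROWS];
  }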