Use of com.linkedin.pinot.core.common.BlockValSet in project pinot by linkedin.
The class ScanBasedFilterOperator, method nextFilterBlock:
@Override
public BaseFilterBlock nextFilterBlock(BlockId blockId) {
  DataSourceMetadata dataSourceMetadata = dataSource.getDataSourceMetadata();
  FilterBlockDocIdSet docIdSet;
  Block nextBlock = dataSource.nextBlock();
  BlockValSet blockValueSet = nextBlock.getBlockValueSet();
  BlockMetadata blockMetadata = nextBlock.getMetadata();
  if (dataSourceMetadata.isSingleValue()) {
    docIdSet = new ScanBasedSingleValueDocIdSet(dataSource.getOperatorName(), blockValueSet, blockMetadata, predicateEvaluator);
  } else {
    docIdSet = new ScanBasedMultiValueDocIdSet(dataSource.getOperatorName(), blockValueSet, blockMetadata, predicateEvaluator);
  }
  if (startDocId != null) {
    docIdSet.setStartDocId(startDocId);
  }
  if (endDocId != null) {
    docIdSet.setEndDocId(endDocId);
  }
  return new ScanBlock(docIdSet);
}
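For context, a consumer of the returned filter block typically drains its doc-id set to collect the matching document ids. The following is a minimal, hypothetical sketch; it assumes BaseFilterBlock exposes getBlockDocIdSet() and that BlockDocIdIterator.next() returns Constants.EOF once exhausted, so verify both against your Pinot version.
// Hypothetical consumer of the BaseFilterBlock produced above.
// Assumptions: BaseFilterBlock.getBlockDocIdSet() and Constants.EOF behave as
// described in the lead-in; adjust imports to match your Pinot version.
import com.linkedin.pinot.core.common.BlockDocIdIterator;
import com.linkedin.pinot.core.common.Constants;
import java.util.ArrayList;
import java.util.List;

public class FilterBlockConsumer {
  public static List<Integer> collectMatchingDocIds(BaseFilterBlock filterBlock) {
    List<Integer> docIds = new ArrayList<>();
    BlockDocIdIterator iterator = filterBlock.getBlockDocIdSet().iterator();
    int docId;
    // next() yields matching doc ids in order and returns Constants.EOF at the end.
    while ((docId = iterator.next()) != Constants.EOF) {
      docIds.add(docId);
    }
    return docIds;
  }
}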
Use of com.linkedin.pinot.core.common.BlockValSet in project pinot by linkedin.
The class TransformExpressionOperatorTest, method evaluateExpression:
/**
 * Helper method to evaluate one expression using the TransformOperator.
 *
 * @param expression Expression to evaluate
 * @return Result of evaluation
 */
private double[] evaluateExpression(String expression) {
  Operator filterOperator = new MatchEntireSegmentOperator(_indexSegment.getSegmentMetadata().getTotalDocs());
  final BReusableFilteredDocIdSetOperator docIdSetOperator = new BReusableFilteredDocIdSetOperator(filterOperator, _indexSegment.getSegmentMetadata().getTotalDocs(), NUM_ROWS);
  final Map<String, BaseOperator> dataSourceMap = buildDataSourceMap(_indexSegment.getSegmentMetadata().getSchema());
  final MProjectionOperator projectionOperator = new MProjectionOperator(dataSourceMap, docIdSetOperator);
  Pql2Compiler compiler = new Pql2Compiler();
  List<TransformExpressionTree> expressionTrees = new ArrayList<>(1);
  expressionTrees.add(compiler.compileToExpressionTree(expression));
  TransformExpressionOperator transformOperator = new TransformExpressionOperator(projectionOperator, expressionTrees);
  transformOperator.open();
  TransformBlock transformBlock = (TransformBlock) transformOperator.getNextBlock();
  BlockValSet blockValueSet = transformBlock.getBlockValueSet(expression);
  double[] actual = blockValueSet.getDoubleValuesSV();
  transformOperator.close();
  return actual;
}
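A test can then exercise this helper by compiling an expression over known columns and comparing the per-document results. A minimal sketch follows; the column names, the _expectedSum fixture, and the test name are hypothetical and only illustrate the call pattern (TestNG's Assert is assumed, matching the test class above).
// Hypothetical usage of evaluateExpression(); 'colA', 'colB' and _expectedSum
// are illustrative only and are not part of the actual test class.
@Test
public void testAdditionTransform() {
  double[] actual = evaluateExpression("add(colA,colB)");
  for (int i = 0; i < actual.length; i++) {
    // Expected values would be derived from the rows used to build the segment.
    Assert.assertEquals(actual[i], _expectedSum[i], 1e-5, "Mismatch at doc " + i);
  }
}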
Use of com.linkedin.pinot.core.common.BlockValSet in project pinot by linkedin.
The class SegmentDumpTool, method doMain:
public void doMain(String[] args) throws Exception {
  CmdLineParser parser = new CmdLineParser(this);
  parser.parseArgument(args);
  File segmentDir = new File(segmentPath);
  SegmentMetadata metadata = new SegmentMetadataImpl(segmentDir);
  // All columns by default
  if (columnNames == null) {
    columnNames = new ArrayList<String>(metadata.getSchema().getColumnNames());
    Collections.sort(columnNames);
  }
  IndexSegment indexSegment = Loaders.IndexSegment.load(segmentDir, ReadMode.mmap);
  Map<String, Dictionary> dictionaries = new HashMap<String, Dictionary>();
  Map<String, BlockSingleValIterator> iterators = new HashMap<String, BlockSingleValIterator>();
  for (String columnName : columnNames) {
    DataSource dataSource = indexSegment.getDataSource(columnName);
    dataSource.open();
    Block block = dataSource.nextBlock();
    BlockValSet blockValSet = block.getBlockValueSet();
    BlockSingleValIterator itr = (BlockSingleValIterator) blockValSet.iterator();
    iterators.put(columnName, itr);
    dictionaries.put(columnName, dataSource.getDictionary());
  }
  System.out.print("Doc\t");
  for (String columnName : columnNames) {
    System.out.print(columnName);
    System.out.print("\t");
  }
  System.out.println();
  for (int i = 0; i < indexSegment.getSegmentMetadata().getTotalDocs(); i++) {
    System.out.print(i);
    System.out.print("\t");
    for (String columnName : columnNames) {
      FieldSpec.DataType columnType = metadata.getSchema().getFieldSpecFor(columnName).getDataType();
      BlockSingleValIterator itr = iterators.get(columnName);
      Integer encodedValue = itr.nextIntVal();
      Object value = dictionaries.get(columnName).get(encodedValue);
      System.out.print(value);
      System.out.print("\t");
    }
    System.out.println();
  }
  if (dumpStarTree) {
    System.out.println();
    File starTreeFile = new File(segmentDir, V1Constants.STAR_TREE_INDEX_FILE);
    StarTreeInterf tree = StarTreeSerDe.fromFile(starTreeFile, ReadMode.mmap);
    tree.printTree();
  }
}
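The per-column pattern above, reading dictionary-encoded ids through a BlockSingleValIterator and decoding them with the column dictionary, can be isolated into a small helper. A minimal sketch using only the calls already shown in doMain; the method name dumpColumn is hypothetical.
// Hypothetical helper extracting the dictionary-decode loop used in doMain.
// Relies only on the DataSource, Block, BlockSingleValIterator and Dictionary
// calls already shown above.
private void dumpColumn(DataSource dataSource, int totalDocs) {
  Block block = dataSource.nextBlock();
  BlockSingleValIterator itr = (BlockSingleValIterator) block.getBlockValueSet().iterator();
  Dictionary dictionary = dataSource.getDictionary();
  for (int docId = 0; docId < totalDocs; docId++) {
    int dictId = itr.nextIntVal();            // dictionary-encoded value for this doc
    Object rawValue = dictionary.get(dictId); // decode back to the original value
    System.out.println(docId + "\t" + rawValue);
  }
}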
Use of com.linkedin.pinot.core.common.BlockValSet in project pinot by linkedin.
The class ChunkIndexCreationDriverImplTest, method test3:
@Test
public void test3() throws Exception {
  final IndexSegmentImpl segment = (IndexSegmentImpl) Loaders.IndexSegment.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
  final DataSource ds = segment.getDataSource("column7");
  final Block bl = ds.nextBlock();
  final BlockValSet valSet = bl.getBlockValueSet();
  final int maxValue = ((SegmentMetadataImpl) segment.getSegmentMetadata()).getColumnMetadataFor("column7").getMaxNumberOfMultiValues();
  final BlockMultiValIterator it = (BlockMultiValIterator) valSet.iterator();
  while (it.hasNext()) {
    final int[] entry = new int[maxValue];
    it.nextIntVal(entry);
    LOGGER.trace(Arrays.toString(entry));
  }
}
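When the actual number of values per document matters, the read buffer can be trimmed after each call. A minimal sketch, assuming BlockMultiValIterator.nextIntVal(int[]) returns the number of entries written for the current document (not confirmed by the snippet above); the helper name readTrimmedRow is hypothetical.
// Hypothetical helper: reads one multi-value row and trims the buffer.
// Assumption: BlockMultiValIterator.nextIntVal(int[]) returns the count of
// entries written; verify this against your Pinot version.
private int[] readTrimmedRow(BlockMultiValIterator it, int maxNumMultiValues) {
  int[] buffer = new int[maxNumMultiValues];
  int length = it.nextIntVal(buffer);
  return Arrays.copyOf(buffer, length);
}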
Use of com.linkedin.pinot.core.common.BlockValSet in project pinot by linkedin.
The class ChunkIndexCreationDriverImplTest, method test2:
@Test
public void test2() throws Exception {
  final IndexSegmentImpl segment = (IndexSegmentImpl) Loaders.IndexSegment.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
  // System.out.println("INdex dir:" + INDEX_DIR);
  final DataSource ds = segment.getDataSource("column1");
  final Block bl = ds.nextBlock();
  final BlockValSet valSet = bl.getBlockValueSet();
  final BlockSingleValIterator it = (BlockSingleValIterator) valSet.iterator();
  // TODO: FIXME - load segment with known data and verify that it exists
  while (it.hasNext()) {
    LOGGER.trace(Integer.toString(it.nextIntVal()));
  }
}
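One way to address the TODO above is to decode each dictionary id and compare it against the raw values the segment was built from. A minimal sketch reusing the getDictionary() and Dictionary.get() calls shown in SegmentDumpTool; verifyColumn and expectedValues are hypothetical, and TestNG's Assert is assumed.
// Hypothetical verification loop for the TODO above; expectedValues is an
// illustrative fixture holding the raw column values used to build the segment.
private void verifyColumn(BlockSingleValIterator it, Dictionary dictionary, List<Object> expectedValues) {
  int docId = 0;
  while (it.hasNext()) {
    Object decoded = dictionary.get(it.nextIntVal());
    Assert.assertEquals(decoded, expectedValues.get(docId), "Mismatch at doc " + docId);
    docId++;
  }
  Assert.assertEquals(docId, expectedValues.size(), "Unexpected document count");
}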