Example usage of org.apache.carbondata.core.indexstore.blockletindex.BlockletDataRefNode in the Apache CarbonData project.
From class AbstractDetailQueryResultIterator, method intialiseInfos.
/**
 * Prepares each {@link BlockExecutionInfo} before result iteration begins:
 * applies delete-delta details (when delete delta files exist) and resolves
 * the first data block plus the number of blocks/blocklets to scan.
 */
private void intialiseInfos() {
for (BlockExecutionInfo blockInfo : blockExecutionInfos) {
// finder is only consulted on the legacy BTree path below
DataRefNodeFinder finder = new BTreeDataRefNodeFinder(blockInfo.getEachColumnValueSize(), blockInfo.getDataBlock().getSegmentProperties().getNumberOfSortColumns(), blockInfo.getDataBlock().getSegmentProperties().getNumberOfNoDictSortColumns());
// when delete delta files are present, read them and record deleted rows
if (blockInfo.getDeleteDeltaFilePath() != null && blockInfo.getDeleteDeltaFilePath().length != 0) {
DeleteDeltaInfo deltaInfo = new DeleteDeltaInfo(blockInfo.getDeleteDeltaFilePath());
// read the delete-delta block details and attach them to the execution info
Map<String, DeleteDeltaVo> deletedRows = getDeleteDeltaDetails(blockInfo.getDataBlock(), deltaInfo);
blockInfo.setDeletedRecordsMap(deletedRows);
}
DataRefNode refNode = blockInfo.getDataBlock().getDataRefNode();
if (refNode instanceof BlockletDataRefNode) {
// blocklet-index path: start at this node and scan all of its nodes
BlockletDataRefNode blockletNode = (BlockletDataRefNode) refNode;
blockInfo.setFirstDataBlock(blockletNode);
blockInfo.setNumberOfBlockToScan(blockletNode.numberOfNodes());
} else {
// BTree path: find the start block by key, then advance to the
// requested start blocklet index
DataRefNode start = finder.findFirstDataBlock(refNode, blockInfo.getStartKey());
while (start.nodeIndex() < blockInfo.getStartBlockletIndex()) {
start = start.getNextDataRefNode();
}
long blocksToScan = blockInfo.getNumberOfBlockletToScan();
// a non-positive count means "scan through to the end block"
if (blocksToScan <= 0) {
DataRefNode end = finder.findLastDataBlock(refNode, blockInfo.getEndKey());
blocksToScan = end.nodeIndex() - start.nodeIndex() + 1;
}
blockInfo.setFirstDataBlock(start);
blockInfo.setNumberOfBlockToScan(blocksToScan);
}
}
}
Example usage of org.apache.carbondata.core.indexstore.blockletindex.BlockletDataRefNode in the Apache CarbonData project.
From class AbstractQueryExecutor, method getBlockExecutionInfos.
/**
 * Builds one {@link BlockExecutionInfo} per data block selected for the
 * query, sharing reusable dimension/measure buffers across blocks whose
 * buffer sizes match, and records the scanned-block count statistic.
 *
 * @param queryModel the query to execute
 * @return execution infos for every selected block
 * @throws IOException if reading block metadata fails
 */
protected List<BlockExecutionInfo> getBlockExecutionInfos(QueryModel queryModel) throws IOException {
initQuery(queryModel);
List<BlockExecutionInfo> executionInfos = new ArrayList<>();
// buffers from the first block, reused by later blocks of the same size
ReusableDataBuffer[] sharedDimensionBuffers = null;
ReusableDataBuffer[] sharedMeasureBuffers = null;
for (AbstractIndex index : queryProperties.dataBlocks) {
BlockletDataRefNode refNode = (BlockletDataRefNode) index.getDataRefNode();
final BlockExecutionInfo info = getBlockExecutionInfoForBlock(queryModel, index, refNode.numberOfNodes(), refNode.getTableBlockInfo().getFilePath(), refNode.getTableBlockInfo().getDeletedDeltaFilePath(), refNode.getTableBlockInfo().getSegment());
if (sharedDimensionBuffers == null || sharedMeasureBuffers == null) {
// first block: adopt its buffers as the shared instances
sharedDimensionBuffers = info.getDimensionReusableDataBuffer();
sharedMeasureBuffers = info.getMeasureReusableDataBuffer();
} else {
// subsequent blocks reuse the shared buffers only when lengths match
if (sharedDimensionBuffers.length == info.getDimensionReusableDataBuffer().length) {
info.setDimensionReusableDataBuffer(sharedDimensionBuffers);
}
if (sharedMeasureBuffers.length == info.getMeasureReusableDataBuffer().length) {
info.setMeasureReusableDataBuffer(sharedMeasureBuffers);
}
}
executionInfos.add(info);
}
// record how many blocks this query will scan, when a recorder is attached
if (queryModel.getStatisticsRecorder() != null) {
QueryStatistic queryStatistic = new QueryStatistic();
queryStatistic.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKS_NUM, executionInfos.size());
queryModel.getStatisticsRecorder().recordStatistics(queryStatistic);
}
return executionInfos;
}
Example usage of org.apache.carbondata.core.indexstore.blockletindex.BlockletDataRefNode in the Apache CarbonData project.
From class AbstractDetailQueryResultIterator, method initialiseInfos.
/**
 * Initialises each {@link BlockExecutionInfo}: attaches delete-delta
 * details when delete delta files are present, then sets the first data
 * block and the number of blocklets to scan from the blocklet index node.
 */
private void initialiseInfos() {
for (BlockExecutionInfo info : blockExecutionInfos) {
// when delete delta files exist, read them and record the deleted rows
if (info.getDeleteDeltaFilePath() != null && info.getDeleteDeltaFilePath().length > 0) {
DeleteDeltaInfo deltaInfo = new DeleteDeltaInfo(info.getDeleteDeltaFilePath());
Map<String, DeleteDeltaVo> deletedRows = getDeleteDeltaDetails(info.getDataBlock(), deltaInfo);
info.setDeletedRecordsMap(deletedRows);
}
DataRefNode refNode = info.getDataBlock().getDataRefNode();
// this iterator variant only ever sees blocklet-index nodes
assert (refNode instanceof BlockletDataRefNode);
BlockletDataRefNode blockletNode = (BlockletDataRefNode) refNode;
info.setFirstDataBlock(blockletNode);
info.setNumberOfBlockToScan(blockletNode.numberOfNodes());
}
}
Example usage of org.apache.carbondata.core.indexstore.blockletindex.BlockletDataRefNode in the Apache CarbonData project.
From class FilterUtilTest, method testCreateBitSetGroupWithColumnChunk.
/**
 * Verifies that createBitSetGroupWithColumnChunk produces one bit set per
 * page, using a mocked blocklet node exposing two pages (94 and 6 rows).
 */
@Test
public void testCreateBitSetGroupWithColumnChunk() {
// mock a blocklet node that reports two pages with fixed row counts
final BlockletDataRefNode mockedNode = new MockUp<BlockletDataRefNode>() {
@Mock
public int numberOfPages() {
return 2;
}
@Mock
public int getPageRowCount(int pageNumber) {
return pageNumber == 0 ? 94 : 6;
}
}.getMockInstance();
// raw column chunks whose data block is the mocked blocklet node
RawBlockletColumnChunks mockedChunks = new MockUp<RawBlockletColumnChunks>() {
@Mock
public DataRefNode getDataBlock() {
return mockedNode;
}
}.getMockInstance();
BitSetGroup group = FilterUtil.createBitSetGroupWithColumnChunk(mockedChunks, true);
// one bit set per mocked page
assertTrue(group.getNumberOfPages() == 2);
}
Aggregations