use of org.apache.carbondata.core.datastore.block.SegmentProperties in project carbondata by apache.
the class AbstractQueryExecutor method getBlockExecutionInfoForBlock.
/**
 * Below method will be used to get the block execution info which is
 * required to execute any block based on query model
 *
 * @param queryModel query model from user query
 * @param blockIndex block index
 * @param startBlockletIndex first blocklet to scan
 * @param numberOfBlockletToScan number of blocklets to scan
 * @param filePath data file path of the block
 * @return block execution info
 * @throws QueryExecutionException any failure during block info creation
 */
protected BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel,
    AbstractIndex blockIndex, int startBlockletIndex, int numberOfBlockletToScan,
    String filePath) throws QueryExecutionException {
  BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
  SegmentProperties segmentProperties = blockIndex.getSegmentProperties();
  List<CarbonDimension> tableBlockDimensions = segmentProperties.getDimensions();
  KeyGenerator blockKeyGenerator = segmentProperties.getDimensionKeyGenerator();
  // keep only those query dimensions that are present in the table block
  List<QueryDimension> currentBlockQueryDimensions =
      RestructureUtil.createDimensionInfoAndGetCurrentBlockQueryDimension(blockExecutionInfo,
          queryModel.getQueryDimension(), tableBlockDimensions,
          segmentProperties.getComplexDimensions());
  int tableFactPathLength = CarbonStorePath
      .getCarbonTablePath(queryModel.getAbsoluteTableIdentifier().getStorePath(),
          queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier())
      .getFactDir().length() + 1;
  blockExecutionInfo.setBlockId(filePath.substring(tableFactPathLength));
  blockExecutionInfo.setStartBlockletIndex(startBlockletIndex);
  blockExecutionInfo.setNumberOfBlockletToScan(numberOfBlockletToScan);
  blockExecutionInfo.setQueryDimensions(currentBlockQueryDimensions
      .toArray(new QueryDimension[currentBlockQueryDimensions.size()]));
  // get measures present in the current block
  List<QueryMeasure> currentBlockQueryMeasures =
      getCurrentBlockQueryMeasures(blockExecutionInfo, queryModel, blockIndex);
  blockExecutionInfo.setQueryMeasures(
      currentBlockQueryMeasures.toArray(new QueryMeasure[currentBlockQueryMeasures.size()]));
  blockExecutionInfo.setDataBlock(blockIndex);
  blockExecutionInfo.setBlockKeyGenerator(blockKeyGenerator);
  // set whether this is a raw record detail query
  blockExecutionInfo.setRawRecordDetailQuery(queryModel.isForcedDetailRawQuery());
  // total number of dimension and measure blocks
  blockExecutionInfo.setTotalNumberDimensionBlock(
      segmentProperties.getDimensionOrdinalToBlockMapping().size());
  blockExecutionInfo.setTotalNumberOfMeasureBlock(
      segmentProperties.getMeasuresOrdinalToBlockMapping().size());
  blockExecutionInfo.setAbsoluteTableIdentifier(queryModel.getAbsoluteTableIdentifier());
  blockExecutionInfo.setComplexDimensionInfoMap(QueryUtil.getComplexDimensionsMap(
      currentBlockQueryDimensions, segmentProperties.getDimensionOrdinalToBlockMapping(),
      segmentProperties.getEachComplexDimColumnValueSize(),
      queryProperties.columnToDictionayMapping, queryProperties.complexFilterDimension));
  IndexKey startIndexKey = null;
  IndexKey endIndexKey = null;
  if (null != queryModel.getFilterExpressionResolverTree()) {
    // load the filter executer tree for filter evaluation
    blockExecutionInfo.setFilterExecuterTree(FilterUtil.getFilterExecuterTree(
        queryModel.getFilterExpressionResolverTree(), segmentProperties,
        blockExecutionInfo.getComlexDimensionInfoMap()));
    List<IndexKey> listOfStartEndKeys = new ArrayList<IndexKey>(2);
    FilterUtil.traverseResolverTreeAndGetStartAndEndKey(segmentProperties,
        queryModel.getFilterExpressionResolverTree(), listOfStartEndKeys);
    startIndexKey = listOfStartEndKeys.get(0);
    endIndexKey = listOfStartEndKeys.get(1);
  } else {
    try {
      startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
      endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
    } catch (KeyGenException e) {
      throw new QueryExecutionException(e);
    }
  }
  // set the start and end index keys of the block node
  blockExecutionInfo.setStartKey(startIndexKey);
  blockExecutionInfo.setEndKey(endIndexKey);
  // expression dimensions
  List<CarbonDimension> expressionDimensions =
      new ArrayList<CarbonDimension>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
  // expression measures
  List<CarbonMeasure> expressionMeasures =
      new ArrayList<CarbonMeasure>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
  // set all the dimension chunk indexes to be read from the file
  int numberOfElementToConsider = 0;
  // list of dimensions to be projected
  Set<Integer> allProjectionListDimensionIdexes = new LinkedHashSet<>();
  // create a list of filter dimensions present in the current block
  Set<CarbonDimension> currentBlockFilterDimensions =
      getCurrentBlockFilterDimensions(queryProperties.complexFilterDimension, segmentProperties);
  int[] dimensionsBlockIndexes = QueryUtil.getDimensionsBlockIndexes(currentBlockQueryDimensions,
      segmentProperties.getDimensionOrdinalToBlockMapping(), expressionDimensions,
      currentBlockFilterDimensions, allProjectionListDimensionIdexes);
  int numberOfColumnToBeReadInOneIO = Integer.parseInt(CarbonProperties.getInstance()
      .getProperty(CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO,
          CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE));
  if (dimensionsBlockIndexes.length > 0) {
    numberOfElementToConsider =
        dimensionsBlockIndexes[dimensionsBlockIndexes.length - 1]
            == segmentProperties.getBlockTodimensionOrdinalMapping().size() - 1
            ? dimensionsBlockIndexes.length - 1 : dimensionsBlockIndexes.length;
    blockExecutionInfo.setAllSelectedDimensionBlocksIndexes(CarbonUtil.getRangeIndex(
        dimensionsBlockIndexes, numberOfElementToConsider, numberOfColumnToBeReadInOneIO));
  } else {
    blockExecutionInfo.setAllSelectedDimensionBlocksIndexes(new int[0][0]);
  }
  // get the list of updated filter measures present in the current block
  Set<CarbonMeasure> currentBlockFilterMeasures =
      getCurrentBlockFilterMeasures(queryProperties.filterMeasures, segmentProperties);
  // list of measures to be projected
  List<Integer> allProjectionListMeasureIndexes = new ArrayList<>();
  int[] measureBlockIndexes = QueryUtil.getMeasureBlockIndexes(currentBlockQueryMeasures,
      expressionMeasures, segmentProperties.getMeasuresOrdinalToBlockMapping(),
      currentBlockFilterMeasures, allProjectionListMeasureIndexes);
  if (measureBlockIndexes.length > 0) {
    numberOfElementToConsider =
        measureBlockIndexes[measureBlockIndexes.length - 1]
            == segmentProperties.getMeasures().size() - 1
            ? measureBlockIndexes.length - 1 : measureBlockIndexes.length;
    // set all the measure chunk indexes to be read from the file
    blockExecutionInfo.setAllSelectedMeasureBlocksIndexes(CarbonUtil.getRangeIndex(
        measureBlockIndexes, numberOfElementToConsider, numberOfColumnToBeReadInOneIO));
  } else {
    blockExecutionInfo.setAllSelectedMeasureBlocksIndexes(new int[0][0]);
  }
  // set the indexes of the dimensions in the projection list
  blockExecutionInfo.setProjectionListDimensionIndexes(ArrayUtils.toPrimitive(
      allProjectionListDimensionIdexes
          .toArray(new Integer[allProjectionListDimensionIdexes.size()])));
  // set the indexes of the measures in the projection list
  blockExecutionInfo.setProjectionListMeasureIndexes(ArrayUtils.toPrimitive(
      allProjectionListMeasureIndexes
          .toArray(new Integer[allProjectionListMeasureIndexes.size()])));
  // set the size of the fixed-length key (dictionary columns)
  blockExecutionInfo.setFixedLengthKeySize(
      getKeySize(currentBlockQueryDimensions, segmentProperties));
  Set<Integer> dictionaryColumnBlockIndex = new HashSet<Integer>();
  List<Integer> noDictionaryColumnBlockIndex = new ArrayList<Integer>();
  // get the block indexes to be read from the file for the query dimensions,
  // for both dictionary and no-dictionary columns
  QueryUtil.fillQueryDimensionsBlockIndexes(currentBlockQueryDimensions,
      segmentProperties.getDimensionOrdinalToBlockMapping(), dictionaryColumnBlockIndex,
      noDictionaryColumnBlockIndex);
  int[] queryDictionaryColumnBlockIndexes = ArrayUtils.toPrimitive(
      dictionaryColumnBlockIndex.toArray(new Integer[dictionaryColumnBlockIndex.size()]));
  // the dictionary column block indexes must be sorted because, for all
  // dimension columns, the key will be filled in key order
  Arrays.sort(queryDictionaryColumnBlockIndexes);
  blockExecutionInfo.setDictionaryColumnBlockIndex(queryDictionaryColumnBlockIndexes);
  // set the no-dictionary column block indexes
  blockExecutionInfo.setNoDictionaryBlockIndexes(ArrayUtils.toPrimitive(
      noDictionaryColumnBlockIndex.toArray(new Integer[noDictionaryColumnBlockIndex.size()])));
  // set the column id to dictionary mapping
  blockExecutionInfo.setColumnIdToDcitionaryMapping(queryProperties.columnToDictionayMapping);
  // set each column value size
  blockExecutionInfo.setEachColumnValueSize(segmentProperties.getEachDimColumnValueSize());
  blockExecutionInfo.setComplexColumnParentBlockIndexes(
      getComplexDimensionParentBlockIndexes(currentBlockQueryDimensions));
  blockExecutionInfo.setVectorBatchCollector(queryModel.isVectorReader());
  try {
    // set the column group and its key structure info, which will be used
    // for getting the column group column data in case of the final row
    // and in case of dimension aggregation
    blockExecutionInfo.setColumnGroupToKeyStructureInfo(
        QueryUtil.getColumnGroupKeyStructureInfo(currentBlockQueryDimensions,
            segmentProperties));
  } catch (KeyGenException e) {
    throw new QueryExecutionException(e);
  }
  // set the actual query dimensions and measures; these may differ from the
  // current block's in restructure scenarios
  blockExecutionInfo.setActualQueryDimensions(queryModel.getQueryDimension()
      .toArray(new QueryDimension[queryModel.getQueryDimension().size()]));
  blockExecutionInfo.setActualQueryMeasures(queryModel.getQueryMeasures()
      .toArray(new QueryMeasure[queryModel.getQueryMeasures().size()]));
  return blockExecutionInfo;
}
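The two CarbonUtil.getRangeIndex calls above compress the selected block indexes into contiguous ranges so that up to numberOfColumnToBeReadInOneIO adjacent column chunks can be fetched in a single IO. The sketch below is a rough, hypothetical illustration of that grouping idea, not CarbonUtil's actual implementation; the helper name toRanges is invented for this example:

import java.util.ArrayList;
import java.util.List;

final class RangeIndexSketch {

  // Hypothetical illustration of the grouping behind CarbonUtil.getRangeIndex:
  // merge sorted block indexes into [start, end] pairs of consecutive values,
  // capping each range at maxPerIO columns so one IO never reads more than that.
  static int[][] toRanges(int[] blockIndexes, int count, int maxPerIO) {
    List<int[]> ranges = new ArrayList<>();
    int i = 0;
    while (i < count) {
      int start = blockIndexes[i];
      int end = start;
      // extend the range while the next index is consecutive and the cap allows
      while (i + 1 < count && blockIndexes[i + 1] == end + 1 && end - start + 1 < maxPerIO) {
        end = blockIndexes[++i];
      }
      ranges.add(new int[] { start, end });
      i++;
    }
    return ranges.toArray(new int[ranges.size()][]);
  }
}

Under these assumptions, toRanges(new int[] { 0, 1, 2, 5, 6 }, 5, 2) yields { {0, 1}, {2, 2}, {5, 6} }: consecutive indexes are merged up to the per-IO cap, and non-adjacent indexes start new ranges.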
use of org.apache.carbondata.core.datastore.block.SegmentProperties in project carbondata by apache.
the class CarbonFooterWriterTest method testReadFactMetadata.
/**
 * test writing fact metadata and reading it back.
 */
@Test
public void testReadFactMetadata() throws IOException {
  deleteFile();
  createFile();
  CarbonFooterWriter writer = new CarbonFooterWriter(filePath);
  List<BlockletInfoColumnar> infoColumnars = getBlockletInfoColumnars();
  int[] cardinalities = new int[] { 2, 4, 5, 7, 9, 10 };
  List<ColumnSchema> columnSchema = Arrays.asList(new ColumnSchema[] {
      getDimensionColumn("IMEI1"), getDimensionColumn("IMEI2"), getDimensionColumn("IMEI3"),
      getDimensionColumn("IMEI4"), getDimensionColumn("IMEI5"), getDimensionColumn("IMEI6") });
  List<org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema> wrapperColumnSchema =
      Arrays.asList(new org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema[] {
          getWrapperDimensionColumn("IMEI1"), getWrapperDimensionColumn("IMEI2"),
          getWrapperDimensionColumn("IMEI3"), getWrapperDimensionColumn("IMEI4"),
          getWrapperDimensionColumn("IMEI5"), getWrapperDimensionColumn("IMEI6") });
  int[] colCardinality = CarbonUtil.getFormattedCardinality(cardinalities, wrapperColumnSchema);
  SegmentProperties segmentProperties = new SegmentProperties(wrapperColumnSchema, cardinalities);
  writer.writeFooter(CarbonMetadataUtil.convertFileFooter(infoColumnars, 6, colCardinality,
      columnSchema, segmentProperties), 0);
  CarbonFooterReader metaDataReader = new CarbonFooterReader(filePath, 0);
  List<BlockletInfoColumnar> nodeInfoColumnars =
      CarbonMetadataUtil.convertBlockletInfo(metaDataReader.readFooter());
  assertTrue(nodeInfoColumnars.size() == infoColumnars.size());
}
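The construction pattern in this test recurs across all the listings on this page: a SegmentProperties is always built from a list of wrapper ColumnSchema objects plus a matching cardinality array. A minimal sketch, reusing the test's getWrapperDimensionColumn helper; the getter calls at the end are taken from the other snippets on this page:

// Minimal construction sketch: wrapper column schemas plus one
// cardinality entry per column, as in the test above.
List<org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema> columns =
    Arrays.asList(getWrapperDimensionColumn("IMEI1"), getWrapperDimensionColumn("IMEI2"));
SegmentProperties props = new SegmentProperties(columns, new int[] { 2, 4 });
// the resulting object describes the block-level layout, e.g.
List<CarbonDimension> dimensions = props.getDimensions();
int[] dimColumnValueSizes = props.getEachDimColumnValueSize();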
use of org.apache.carbondata.core.datastore.block.SegmentProperties in project carbondata by apache.
the class FilterUtilTest method testPrepareDefaultEndIndexKey.
@Test
public void testPrepareDefaultEndIndexKey() throws Exception {
  List<ColumnSchema> columnsInTable = new ArrayList<>();
  columnsInTable.add(columnSchema);
  int[] columnCardinality = new int[] { 1, 2 };
  new MockUp<ColumnSchema>() {

    @Mock
    public List<Encoding> getEncodingList() {
      List<Encoding> encodingList = new ArrayList<>();
      encodingList.add(Encoding.DICTIONARY);
      return encodingList;
    }
  };
  SegmentProperties segmentProperties = new SegmentProperties(columnsInTable, columnCardinality);
  assertTrue(FilterUtil.prepareDefaultEndIndexKey(segmentProperties) instanceof IndexKey);
}
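prepareDefaultEndIndexKey has a counterpart, prepareDefaultStartIndexKey, and the two are typically used together to bound an unfiltered scan, as in getBlockExecutionInfoForBlock above. A minimal sketch of that pairing, with both KeyGenException-declaring calls wrapped the same way the executor wraps them:

// Default scan bounds when no filter resolver tree is present.
IndexKey startIndexKey;
IndexKey endIndexKey;
try {
  startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
  endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
} catch (KeyGenException e) {
  throw new QueryExecutionException(e);
}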
use of org.apache.carbondata.core.datastore.block.SegmentProperties in project carbondata by apache.
the class CarbonStreamRecordReader method initializeFilter.
private void initializeFilter() {
  List<ColumnSchema> wrapperColumnSchemaList = CarbonUtil.getColumnSchemaList(
      carbonTable.getDimensionByTableName(carbonTable.getTableName()),
      carbonTable.getMeasureByTableName(carbonTable.getTableName()));
  int[] dimLensWithComplex = new int[wrapperColumnSchemaList.size()];
  for (int i = 0; i < dimLensWithComplex.length; i++) {
    dimLensWithComplex[i] = Integer.MAX_VALUE;
  }
  int[] dictionaryColumnCardinality =
      CarbonUtil.getFormattedCardinality(dimLensWithComplex, wrapperColumnSchemaList);
  SegmentProperties segmentProperties =
      new SegmentProperties(wrapperColumnSchemaList, dictionaryColumnCardinality);
  Map<Integer, GenericQueryType> complexDimensionInfoMap = new HashMap<>();
  FilterResolverIntf resolverIntf = model.getFilterExpressionResolverTree();
  filter = FilterUtil.getFilterExecuterTree(resolverIntf, segmentProperties,
      complexDimensionInfoMap);
  // for row filters, the column indexes in the expression must be updated
  FilterUtil.updateIndexOfColumnExpression(resolverIntf.getFilterExpression(),
      carbonTable.getDimensionOrdinalMax());
}
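Because a streaming segment carries no precomputed dictionary cardinality, the loop above treats every column as unbounded before building the SegmentProperties. That step could be factored into a small helper; a hedged sketch (the helper name unboundedCardinality is invented for this example):

// Streaming rows have no real cardinality statistics, so each column
// is treated as unbounded (Integer.MAX_VALUE) for filter evaluation.
static int[] unboundedCardinality(int columnCount) {
  int[] dimLensWithComplex = new int[columnCount];
  java.util.Arrays.fill(dimLensWithComplex, Integer.MAX_VALUE);
  return dimLensWithComplex;
}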
use of org.apache.carbondata.core.datastore.block.SegmentProperties in project carbondata by apache.
the class SegmentTaskIndexStore method loadBlocks.
/**
 * Below method will be used to load the blocks
 *
 * @param taskBucketHolder task and bucket whose blocks are to be loaded
 * @param tableBlockInfoList block info of all the blocks belonging to that task
 * @param tableIdentifier absolute identifier of the table
 * @return loaded segment
 * @throws IOException any failure while reading the index file
 */
private AbstractIndex loadBlocks(TaskBucketHolder taskBucketHolder,
    List<TableBlockInfo> tableBlockInfoList, AbsoluteTableIdentifier tableIdentifier)
    throws IOException {
  // all the blocks of one task id will be loaded together,
  // so create a list holding the data file metadata of one task
  List<DataFileFooter> footerList = CarbonUtil.readCarbonIndexFile(taskBucketHolder.taskNo,
      taskBucketHolder.bucketNumber, tableBlockInfoList, tableIdentifier);
  // Reuse the SegmentProperties object if tableIdentifier, columnsInTable and
  // columnCardinality are the same.
  List<ColumnSchema> columnsInTable = footerList.get(0).getColumnInTable();
  int[] columnCardinality = footerList.get(0).getSegmentInfo().getColumnCardinality();
  SegmentPropertiesWrapper segmentPropertiesWrapper =
      new SegmentPropertiesWrapper(tableIdentifier, columnsInTable, columnCardinality);
  SegmentProperties segmentProperties;
  if (this.segmentProperties.containsKey(segmentPropertiesWrapper)) {
    segmentProperties = this.segmentProperties.get(segmentPropertiesWrapper);
  } else {
    // create the segment metadata details, which will be useful in query handling;
    // all the data file footers share common segment properties, so the first
    // one can be used to create them
    segmentProperties = new SegmentProperties(columnsInTable, columnCardinality);
    this.segmentProperties.put(segmentPropertiesWrapper, segmentProperties);
  }
  AbstractIndex segment = new SegmentTaskIndex(segmentProperties);
  // the file path of only the first block is needed, as all table block info
  // paths of the same task id will be the same
  segment.buildIndex(footerList);
  return segment;
}
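The containsKey/get/put sequence above is a plain cache keyed by SegmentPropertiesWrapper, which is why the wrapper must implement equals and hashCode over the identifier, schema, and cardinality it holds. If the backing map supports Java 8's Map API, the same reuse can be written atomically with computeIfAbsent; a sketch, assuming this.segmentProperties is, for example, a ConcurrentHashMap:

// Equivalent cache lookup with computeIfAbsent: the SegmentProperties is
// built only when no entry exists yet for this wrapper key, and concurrent
// callers never build it twice for the same key.
SegmentProperties segmentProperties = this.segmentProperties.computeIfAbsent(
    segmentPropertiesWrapper,
    wrapper -> new SegmentProperties(columnsInTable, columnCardinality));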