Example 1 with SegmentProperties

Use of org.apache.carbondata.core.datastore.block.SegmentProperties in project carbondata by apache.

From class AbstractQueryExecutor, method getBlockExecutionInfoForBlock:

/**
   * Builds the block execution info required to execute any block,
   * based on the query model.
   *
   * @param queryModel query model built from the user query
   * @param blockIndex block index
   * @param startBlockletIndex index of the first blocklet to be scanned
   * @param numberOfBlockletToScan number of blocklets to be scanned
   * @param filePath physical path of the block's data file
   * @return block execution info
   * @throws QueryExecutionException any failure during block info creation
   */
protected BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel, AbstractIndex blockIndex, int startBlockletIndex, int numberOfBlockletToScan, String filePath) throws QueryExecutionException {
    BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
    SegmentProperties segmentProperties = blockIndex.getSegmentProperties();
    List<CarbonDimension> tableBlockDimensions = segmentProperties.getDimensions();
    KeyGenerator blockKeyGenerator = segmentProperties.getDimensionKeyGenerator();
    // keep only those dimensions in the query that are present in the
    // table block
    List<QueryDimension> currentBlockQueryDimensions = RestructureUtil.createDimensionInfoAndGetCurrentBlockQueryDimension(blockExecutionInfo, queryModel.getQueryDimension(), tableBlockDimensions, segmentProperties.getComplexDimensions());
    int tableFactPathLength = CarbonStorePath.getCarbonTablePath(queryModel.getAbsoluteTableIdentifier().getStorePath(), queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier()).getFactDir().length() + 1;
    blockExecutionInfo.setBlockId(filePath.substring(tableFactPathLength));
    blockExecutionInfo.setStartBlockletIndex(startBlockletIndex);
    blockExecutionInfo.setNumberOfBlockletToScan(numberOfBlockletToScan);
    blockExecutionInfo.setQueryDimensions(currentBlockQueryDimensions.toArray(new QueryDimension[currentBlockQueryDimensions.size()]));
    // get measures present in the current block
    List<QueryMeasure> currentBlockQueryMeasures = getCurrentBlockQueryMeasures(blockExecutionInfo, queryModel, blockIndex);
    blockExecutionInfo.setQueryMeasures(currentBlockQueryMeasures.toArray(new QueryMeasure[currentBlockQueryMeasures.size()]));
    blockExecutionInfo.setDataBlock(blockIndex);
    blockExecutionInfo.setBlockKeyGenerator(blockKeyGenerator);
    // set whether this is a raw record detail query
    blockExecutionInfo.setRawRecordDetailQuery(queryModel.isForcedDetailRawQuery());
    // total number of dimension blocks
    blockExecutionInfo.setTotalNumberDimensionBlock(segmentProperties.getDimensionOrdinalToBlockMapping().size());
    blockExecutionInfo.setTotalNumberOfMeasureBlock(segmentProperties.getMeasuresOrdinalToBlockMapping().size());
    blockExecutionInfo.setAbsoluteTableIdentifier(queryModel.getAbsoluteTableIdentifier());
    blockExecutionInfo.setComplexDimensionInfoMap(QueryUtil.getComplexDimensionsMap(currentBlockQueryDimensions, segmentProperties.getDimensionOrdinalToBlockMapping(), segmentProperties.getEachComplexDimColumnValueSize(), queryProperties.columnToDictionayMapping, queryProperties.complexFilterDimension));
    IndexKey startIndexKey = null;
    IndexKey endIndexKey = null;
    if (null != queryModel.getFilterExpressionResolverTree()) {
        // loading the filter executer tree for filter evaluation
        blockExecutionInfo.setFilterExecuterTree(FilterUtil.getFilterExecuterTree(queryModel.getFilterExpressionResolverTree(), segmentProperties, blockExecutionInfo.getComlexDimensionInfoMap()));
        List<IndexKey> listOfStartEndKeys = new ArrayList<IndexKey>(2);
        FilterUtil.traverseResolverTreeAndGetStartAndEndKey(segmentProperties, queryModel.getFilterExpressionResolverTree(), listOfStartEndKeys);
        startIndexKey = listOfStartEndKeys.get(0);
        endIndexKey = listOfStartEndKeys.get(1);
    } else {
        try {
            startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
            endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
        } catch (KeyGenException e) {
            throw new QueryExecutionException(e);
        }
    }
    // setting the start index key of the block node
    blockExecutionInfo.setStartKey(startIndexKey);
    // setting the end index key of the block node
    blockExecutionInfo.setEndKey(endIndexKey);
    // expression dimensions
    List<CarbonDimension> expressionDimensions = new ArrayList<CarbonDimension>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // expression measures
    List<CarbonMeasure> expressionMeasures = new ArrayList<CarbonMeasure>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // setting all the dimension chunk indexes to be read from file
    int numberOfElementToConsider = 0;
    // list of dimensions to be projected
    Set<Integer> allProjectionListDimensionIdexes = new LinkedHashSet<>();
    // create a list of filter dimensions present in the current block
    Set<CarbonDimension> currentBlockFilterDimensions = getCurrentBlockFilterDimensions(queryProperties.complexFilterDimension, segmentProperties);
    int[] dimensionsBlockIndexes = QueryUtil.getDimensionsBlockIndexes(currentBlockQueryDimensions, segmentProperties.getDimensionOrdinalToBlockMapping(), expressionDimensions, currentBlockFilterDimensions, allProjectionListDimensionIdexes);
    int numberOfColumnToBeReadInOneIO = Integer.parseInt(CarbonProperties.getInstance().getProperty(CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO, CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE));
    if (dimensionsBlockIndexes.length > 0) {
        numberOfElementToConsider = dimensionsBlockIndexes[dimensionsBlockIndexes.length - 1] == segmentProperties.getBlockTodimensionOrdinalMapping().size() - 1 ? dimensionsBlockIndexes.length - 1 : dimensionsBlockIndexes.length;
        blockExecutionInfo.setAllSelectedDimensionBlocksIndexes(CarbonUtil.getRangeIndex(dimensionsBlockIndexes, numberOfElementToConsider, numberOfColumnToBeReadInOneIO));
    } else {
        blockExecutionInfo.setAllSelectedDimensionBlocksIndexes(new int[0][0]);
    }
    // get the list of updated filter measures present in the current block
    Set<CarbonMeasure> currentBlockFilterMeasures = getCurrentBlockFilterMeasures(queryProperties.filterMeasures, segmentProperties);
    // list of measures to be projected
    List<Integer> allProjectionListMeasureIndexes = new ArrayList<>();
    int[] measureBlockIndexes = QueryUtil.getMeasureBlockIndexes(currentBlockQueryMeasures, expressionMeasures, segmentProperties.getMeasuresOrdinalToBlockMapping(), currentBlockFilterMeasures, allProjectionListMeasureIndexes);
    if (measureBlockIndexes.length > 0) {
        numberOfElementToConsider = measureBlockIndexes[measureBlockIndexes.length - 1] == segmentProperties.getMeasures().size() - 1 ? measureBlockIndexes.length - 1 : measureBlockIndexes.length;
        // setting all the measure chunk indexes to be read from file
        blockExecutionInfo.setAllSelectedMeasureBlocksIndexes(CarbonUtil.getRangeIndex(measureBlockIndexes, numberOfElementToConsider, numberOfColumnToBeReadInOneIO));
    } else {
        blockExecutionInfo.setAllSelectedMeasureBlocksIndexes(new int[0][0]);
    }
    // setting the indexes of the dimensions present in the projection list
    blockExecutionInfo.setProjectionListDimensionIndexes(ArrayUtils.toPrimitive(allProjectionListDimensionIdexes.toArray(new Integer[allProjectionListDimensionIdexes.size()])));
    // setting the indexes of the measures present in the projection list
    blockExecutionInfo.setProjectionListMeasureIndexes(ArrayUtils.toPrimitive(allProjectionListMeasureIndexes.toArray(new Integer[allProjectionListMeasureIndexes.size()])));
    // setting the size of fixed key column (dictionary column)
    blockExecutionInfo.setFixedLengthKeySize(getKeySize(currentBlockQueryDimensions, segmentProperties));
    Set<Integer> dictionaryColumnBlockIndex = new HashSet<Integer>();
    List<Integer> noDictionaryColumnBlockIndex = new ArrayList<Integer>();
    // get the block indexes to be read from the file for the query dimensions,
    // covering both dictionary columns and no dictionary columns
    QueryUtil.fillQueryDimensionsBlockIndexes(currentBlockQueryDimensions, segmentProperties.getDimensionOrdinalToBlockMapping(), dictionaryColumnBlockIndex, noDictionaryColumnBlockIndex);
    int[] queryDictionaryColumnBlockIndexes = ArrayUtils.toPrimitive(dictionaryColumnBlockIndex.toArray(new Integer[dictionaryColumnBlockIndex.size()]));
    // the dictionary column block indexes need to be sorted because for all
    // dimension columns the key will be filled based on key order
    Arrays.sort(queryDictionaryColumnBlockIndexes);
    blockExecutionInfo.setDictionaryColumnBlockIndex(queryDictionaryColumnBlockIndexes);
    // setting the no dictionary column block indexes
    blockExecutionInfo.setNoDictionaryBlockIndexes(ArrayUtils.toPrimitive(noDictionaryColumnBlockIndex.toArray(new Integer[noDictionaryColumnBlockIndex.size()])));
    // setting column id to dictionary mapping
    blockExecutionInfo.setColumnIdToDcitionaryMapping(queryProperties.columnToDictionayMapping);
    // setting each column value size
    blockExecutionInfo.setEachColumnValueSize(segmentProperties.getEachDimColumnValueSize());
    blockExecutionInfo.setComplexColumnParentBlockIndexes(getComplexDimensionParentBlockIndexes(currentBlockQueryDimensions));
    blockExecutionInfo.setVectorBatchCollector(queryModel.isVectorReader());
    try {
        // set the column group and its key structure info, which will be
        // used to get the column group column data for the final row
        // and for dimension aggregation
        blockExecutionInfo.setColumnGroupToKeyStructureInfo(QueryUtil.getColumnGroupKeyStructureInfo(currentBlockQueryDimensions, segmentProperties));
    } catch (KeyGenException e) {
        throw new QueryExecutionException(e);
    }
    // set actual query dimensions and measures. They may differ from the current block's in restructure scenarios
    blockExecutionInfo.setActualQueryDimensions(queryModel.getQueryDimension().toArray(new QueryDimension[queryModel.getQueryDimension().size()]));
    blockExecutionInfo.setActualQueryMeasures(queryModel.getQueryMeasures().toArray(new QueryMeasure[queryModel.getQueryMeasures().size()]));
    return blockExecutionInfo;
}
Also used:
LinkedHashSet (java.util.LinkedHashSet)
IndexKey (org.apache.carbondata.core.datastore.IndexKey)
ArrayList (java.util.ArrayList)
QueryMeasure (org.apache.carbondata.core.scan.model.QueryMeasure)
BlockExecutionInfo (org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo)
KeyGenerator (org.apache.carbondata.core.keygenerator.KeyGenerator)
HashSet (java.util.HashSet)
CarbonDimension (org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension)
QueryExecutionException (org.apache.carbondata.core.scan.executor.exception.QueryExecutionException)
CarbonMeasure (org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure)
KeyGenException (org.apache.carbondata.core.keygenerator.KeyGenException)
SegmentProperties (org.apache.carbondata.core.datastore.block.SegmentProperties)
QueryDimension (org.apache.carbondata.core.scan.model.QueryDimension)
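The two CarbonUtil.getRangeIndex calls above merge adjacent column block indexes into ranges so that neighboring columns can be fetched in a single IO, capped by NUMBER_OF_COLUMN_TO_READ_IN_IO. A minimal sketch of that grouping idea, assuming a simple [start, end] pair per range; the class and method names are illustrative, not CarbonUtil's actual implementation:

import java.util.ArrayList;
import java.util.List;

public final class RangeIndexSketch {

    // Group a sorted array of column block indexes into contiguous
    // [start, end] ranges, capping each range at maxColumnsPerIO so a
    // single IO never covers more columns than configured.
    static int[][] toRanges(int[] sortedIndexes, int count, int maxColumnsPerIO) {
        List<int[]> ranges = new ArrayList<>();
        int start = 0;
        for (int i = 1; i <= count; i++) {
            boolean contiguous = i < count && sortedIndexes[i] == sortedIndexes[i - 1] + 1;
            boolean withinCap = i - start < maxColumnsPerIO;
            if (!(contiguous && withinCap)) {
                ranges.add(new int[] { sortedIndexes[start], sortedIndexes[i - 1] });
                start = i;
            }
        }
        return ranges.toArray(new int[ranges.size()][]);
    }

    public static void main(String[] args) {
        // columns 0..2 are adjacent and column 5 stands alone, so with a
        // cap of 10 this prints the ranges 0..2 and 5..5
        for (int[] range : toRanges(new int[] { 0, 1, 2, 5 }, 4, 10)) {
            System.out.println(range[0] + ".." + range[1]);
        }
    }
}

Reading a contiguous run of columns in one call rather than issuing one read per column keeps the IO count low for wide projections.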

Example 2 with SegmentProperties

Use of org.apache.carbondata.core.datastore.block.SegmentProperties in project carbondata by apache.

From class CarbonFooterWriterTest, method testReadFactMetadata:

/**
   * Test that the fact metadata (file footer) written by CarbonFooterWriter
   * can be read back via CarbonFooterReader.
   */
@Test
public void testReadFactMetadata() throws IOException {
    deleteFile();
    createFile();
    CarbonFooterWriter writer = new CarbonFooterWriter(filePath);
    List<BlockletInfoColumnar> infoColumnars = getBlockletInfoColumnars();
    int[] cardinalities = new int[] { 2, 4, 5, 7, 9, 10 };
    List<ColumnSchema> columnSchema = Arrays.asList(new ColumnSchema[] { getDimensionColumn("IMEI1"), getDimensionColumn("IMEI2"), getDimensionColumn("IMEI3"), getDimensionColumn("IMEI4"), getDimensionColumn("IMEI5"), getDimensionColumn("IMEI6") });
    List<org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema> wrapperColumnSchema = Arrays.asList(new org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema[] { getWrapperDimensionColumn("IMEI1"), getWrapperDimensionColumn("IMEI2"), getWrapperDimensionColumn("IMEI3"), getWrapperDimensionColumn("IMEI4"), getWrapperDimensionColumn("IMEI5"), getWrapperDimensionColumn("IMEI6") });
    int[] colCardinality = CarbonUtil.getFormattedCardinality(cardinalities, wrapperColumnSchema);
    SegmentProperties segmentProperties = new SegmentProperties(wrapperColumnSchema, cardinalities);
    writer.writeFooter(CarbonMetadataUtil.convertFileFooter(infoColumnars, 6, colCardinality, columnSchema, segmentProperties), 0);
    CarbonFooterReader metaDataReader = new CarbonFooterReader(filePath, 0);
    List<BlockletInfoColumnar> nodeInfoColumnars = CarbonMetadataUtil.convertBlockletInfo(metaDataReader.readFooter());
    assertTrue(nodeInfoColumnars.size() == infoColumnars.size());
}
Also used:
BlockletInfoColumnar (org.apache.carbondata.core.metadata.BlockletInfoColumnar)
ColumnSchema (org.apache.carbondata.format.ColumnSchema)
SegmentProperties (org.apache.carbondata.core.datastore.block.SegmentProperties)
CarbonFooterReader (org.apache.carbondata.core.reader.CarbonFooterReader)
Test (org.junit.Test)
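The wrapper schemas in this test come from a getWrapperDimensionColumn helper the page does not show. A plausible sketch of such a helper, assuming the wrapper ColumnSchema setters (setColumnName, setDimensionColumn, setEncodingList); the field choices are assumptions for illustration:

import java.util.ArrayList;
import java.util.List;

import org.apache.carbondata.core.metadata.encoder.Encoding;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;

public final class WrapperColumnSketch {

    // build a dictionary-encoded dimension column schema for a given name
    static ColumnSchema getWrapperDimensionColumn(String columnName) {
        ColumnSchema schema = new ColumnSchema();
        schema.setColumnName(columnName);
        // mark it as a dimension so SegmentProperties treats it as part of the key
        schema.setDimensionColumn(true);
        List<Encoding> encodings = new ArrayList<>();
        encodings.add(Encoding.DICTIONARY);
        schema.setEncodingList(encodings);
        // the data type setter is omitted here because the DataType API
        // differs across CarbonData versions; set it to match your version
        return schema;
    }
}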

Example 3 with SegmentProperties

Use of org.apache.carbondata.core.datastore.block.SegmentProperties in project carbondata by apache.

From class FilterUtilTest, method testPrepareDefaultEndIndexKey:

@Test
public void testPrepareDefaultEndIndexKey() throws Exception {
    List<ColumnSchema> columnsInTable = new ArrayList<>();
    columnsInTable.add(columnSchema);
    int[] columnCardinality = new int[] { 1, 2 };
    new MockUp<ColumnSchema>() {

        @Mock
        public List<Encoding> getEncodingList() {
            List<Encoding> encodingList = new ArrayList<>();
            encodingList.add(Encoding.DICTIONARY);
            return encodingList;
        }
    };
    SegmentProperties segmentProperties = new SegmentProperties(columnsInTable, columnCardinality);
    assertTrue(FilterUtil.prepareDefaultEndIndexKey(segmentProperties) instanceof IndexKey);
}
Also used:
IndexKey (org.apache.carbondata.core.datastore.IndexKey)
ArrayList (java.util.ArrayList)
ColumnSchema (org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema)
MockUp (mockit.MockUp)
Encoding (org.apache.carbondata.core.metadata.encoder.Encoding)
SegmentProperties (org.apache.carbondata.core.datastore.block.SegmentProperties)
Test (org.junit.Test)
AbstractDictionaryCacheTest (org.apache.carbondata.core.cache.dictionary.AbstractDictionaryCacheTest)
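Conceptually, prepareDefaultEndIndexKey packs each dictionary dimension's maximum surrogate value into one byte array using the segment's key generator. A rough sketch of that idea driven directly through MultiDimKeyVarLengthGenerator; the bit widths and surrogate values are assumptions for illustration, not what FilterUtil computes internally:

import org.apache.carbondata.core.keygenerator.KeyGenException;
import org.apache.carbondata.core.keygenerator.mdkey.MultiDimKeyVarLengthGenerator;

public final class DefaultEndKeySketch {

    public static void main(String[] args) throws KeyGenException {
        // two dictionary dimensions, packed into 1 and 2 bits respectively
        MultiDimKeyVarLengthGenerator keyGen = new MultiDimKeyVarLengthGenerator(new int[] { 1, 2 });
        // the end key carries the maximum surrogate each dimension can hold,
        // so every real key in the block compares less than or equal to it
        byte[] endKey = keyGen.generateKey(new long[] { 1, 3 });
        System.out.println("default end key is " + endKey.length + " byte(s) long");
    }
}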

Example 4 with SegmentProperties

Use of org.apache.carbondata.core.datastore.block.SegmentProperties in project carbondata by apache.

From class CarbonStreamRecordReader, method initializeFilter:

private void initializeFilter() {
    List<ColumnSchema> wrapperColumnSchemaList = CarbonUtil.getColumnSchemaList(carbonTable.getDimensionByTableName(carbonTable.getTableName()), carbonTable.getMeasureByTableName(carbonTable.getTableName()));
    int[] dimLensWithComplex = new int[wrapperColumnSchemaList.size()];
    for (int i = 0; i < dimLensWithComplex.length; i++) {
        dimLensWithComplex[i] = Integer.MAX_VALUE;
    }
    int[] dictionaryColumnCardinality = CarbonUtil.getFormattedCardinality(dimLensWithComplex, wrapperColumnSchemaList);
    SegmentProperties segmentProperties = new SegmentProperties(wrapperColumnSchemaList, dictionaryColumnCardinality);
    Map<Integer, GenericQueryType> complexDimensionInfoMap = new HashMap<>();
    FilterResolverIntf resolverIntf = model.getFilterExpressionResolverTree();
    filter = FilterUtil.getFilterExecuterTree(resolverIntf, segmentProperties, complexDimensionInfoMap);
    // for row filters, the column indexes inside the expression need to be updated
    FilterUtil.updateIndexOfColumnExpression(resolverIntf.getFilterExpression(), carbonTable.getDimensionOrdinalMax());
}
Also used:
BigInteger (java.math.BigInteger)
HashMap (java.util.HashMap)
GenericQueryType (org.apache.carbondata.core.scan.filter.GenericQueryType)
ColumnSchema (org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema)
SegmentProperties (org.apache.carbondata.core.datastore.block.SegmentProperties)
FilterResolverIntf (org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf)
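Since a streaming segment has no precomputed dictionary, every dimension's cardinality is treated as unbounded before SegmentProperties is built. The cardinality-filling loop above is the standard Arrays.fill idiom, sketched here as a self-contained helper (the helper name is an assumption):

import java.util.Arrays;

public final class UnboundedCardinalitySketch {

    // equivalent to the for loop in initializeFilter: treat every
    // dimension's cardinality as unknown for a streaming segment
    static int[] unboundedCardinality(int columnCount) {
        int[] dimLensWithComplex = new int[columnCount];
        Arrays.fill(dimLensWithComplex, Integer.MAX_VALUE);
        return dimLensWithComplex;
    }
}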

Example 5 with SegmentProperties

Use of org.apache.carbondata.core.datastore.block.SegmentProperties in project carbondata by apache.

From class SegmentTaskIndexStore, method loadBlocks:

/**
 * Loads all the blocks of one task.
 *
 * @param taskBucketHolder holder of the task number and bucket number to load
 * @param tableBlockInfoList block infos of the blocks belonging to that task
 * @param tableIdentifier absolute identifier of the table
 * @return loaded segment
 * @throws IOException
 */
private AbstractIndex loadBlocks(TaskBucketHolder taskBucketHolder, List<TableBlockInfo> tableBlockInfoList, AbsoluteTableIdentifier tableIdentifier) throws IOException {
    // all the blocks of one task id will be loaded together,
    // so create a list holding the data file metadata of the whole task
    List<DataFileFooter> footerList = CarbonUtil.readCarbonIndexFile(taskBucketHolder.taskNo, taskBucketHolder.bucketNumber, tableBlockInfoList, tableIdentifier);
    // Reuse SegmentProperties object if tableIdentifier, columnsInTable and columnCardinality are
    // the same.
    List<ColumnSchema> columnsInTable = footerList.get(0).getColumnInTable();
    int[] columnCardinality = footerList.get(0).getSegmentInfo().getColumnCardinality();
    SegmentPropertiesWrapper segmentPropertiesWrapper = new SegmentPropertiesWrapper(tableIdentifier, columnsInTable, columnCardinality);
    SegmentProperties segmentProperties;
    if (this.segmentProperties.containsKey(segmentPropertiesWrapper)) {
        segmentProperties = this.segmentProperties.get(segmentPropertiesWrapper);
    } else {
        // create the metadata details, which will be useful in query handling;
        // all the data file metadata share common segment properties, so the
        // first footer is enough to create them
        segmentProperties = new SegmentProperties(columnsInTable, columnCardinality);
        this.segmentProperties.put(segmentPropertiesWrapper, segmentProperties);
    }
    AbstractIndex segment = new SegmentTaskIndex(segmentProperties);
    // the file path of only the first block is passed, as all table block
    // info paths of the same task id will be the same
    segment.buildIndex(footerList);
    return segment;
}
Also used:
DataFileFooter (org.apache.carbondata.core.metadata.blocklet.DataFileFooter)
AbstractIndex (org.apache.carbondata.core.datastore.block.AbstractIndex)
ColumnSchema (org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema)
SegmentProperties (org.apache.carbondata.core.datastore.block.SegmentProperties)
SegmentTaskIndex (org.apache.carbondata.core.datastore.block.SegmentTaskIndex)
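The reuse works because SegmentPropertiesWrapper is a value object whose equals and hashCode cover everything that determines a SegmentProperties instance. A minimal self-contained sketch of that caching pattern; the key fields and the Function-based factory are simplifications, not the actual wrapper:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

public final class PropertiesCacheSketch<V> {

    // value object standing in for SegmentPropertiesWrapper: equal keys
    // mean the cached value can be shared
    static final class Key {
        final String tableId;           // stands in for AbsoluteTableIdentifier
        final List<String> columnNames; // stands in for List<ColumnSchema>
        final int[] cardinality;

        Key(String tableId, List<String> columnNames, int[] cardinality) {
            this.tableId = tableId;
            this.columnNames = columnNames;
            this.cardinality = cardinality;
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof Key)) {
                return false;
            }
            Key other = (Key) o;
            return tableId.equals(other.tableId)
                && columnNames.equals(other.columnNames)
                && Arrays.equals(cardinality, other.cardinality);
        }

        @Override
        public int hashCode() {
            return Objects.hash(tableId, columnNames, Arrays.hashCode(cardinality));
        }
    }

    private final Map<Key, V> cache = new ConcurrentHashMap<>();

    // one atomic reuse-or-create step
    V getOrCreate(Key key, Function<Key, V> factory) {
        return cache.computeIfAbsent(key, factory);
    }
}

With ConcurrentHashMap.computeIfAbsent the reuse-or-create step is also atomic, which the containsKey/get/put sequence in loadBlocks is not.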

Aggregations

SegmentProperties (org.apache.carbondata.core.datastore.block.SegmentProperties): 19 usages
ArrayList (java.util.ArrayList): 11 usages
ColumnSchema (org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema): 8 usages
IndexKey (org.apache.carbondata.core.datastore.IndexKey): 6 usages
HashMap (java.util.HashMap): 5 usages
Test (org.junit.Test): 5 usages
KeyGenException (org.apache.carbondata.core.keygenerator.KeyGenException): 4 usages
CarbonDimension (org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension): 4 usages
Map (java.util.Map): 3 usages
MockUp (mockit.MockUp): 3 usages
DataFileFooter (org.apache.carbondata.core.metadata.blocklet.DataFileFooter): 3 usages
ColumnSchema (org.apache.carbondata.format.ColumnSchema): 3 usages
HashSet (java.util.HashSet): 2 usages
LinkedHashSet (java.util.LinkedHashSet): 2 usages
AbstractDictionaryCacheTest (org.apache.carbondata.core.cache.dictionary.AbstractDictionaryCacheTest): 2 usages
DataRefNode (org.apache.carbondata.core.datastore.DataRefNode): 2 usages
DataRefNodeFinder (org.apache.carbondata.core.datastore.DataRefNodeFinder): 2 usages
TableBlockInfo (org.apache.carbondata.core.datastore.block.TableBlockInfo): 2 usages
BTreeDataRefNodeFinder (org.apache.carbondata.core.datastore.impl.btree.BTreeDataRefNodeFinder): 2 usages
MultiDimKeyVarLengthGenerator (org.apache.carbondata.core.keygenerator.mdkey.MultiDimKeyVarLengthGenerator): 2 usages