Search in sources:

Example 1 with QueryExecutionException

Use of org.apache.carbondata.core.scan.executor.exception.QueryExecutionException in project carbondata by apache.

The class AbstractQueryExecutor, method getBlockExecutionInfoForBlock:

/**
   * Returns the block execution info required to execute any block,
   * based on the query model.
   *
   * @param queryModel query model from the user query
   * @param blockIndex block index
   * @return block execution info
   * @throws QueryExecutionException on any failure during block info creation
   */
protected BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel, AbstractIndex blockIndex, int startBlockletIndex, int numberOfBlockletToScan, String filePath) throws QueryExecutionException {
    BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
    SegmentProperties segmentProperties = blockIndex.getSegmentProperties();
    List<CarbonDimension> tableBlockDimensions = segmentProperties.getDimensions();
    KeyGenerator blockKeyGenerator = segmentProperties.getDimensionKeyGenerator();
    // restrict the query dimensions to only those that are present in the
    // table block
    List<QueryDimension> currentBlockQueryDimensions = RestructureUtil.createDimensionInfoAndGetCurrentBlockQueryDimension(blockExecutionInfo, queryModel.getQueryDimension(), tableBlockDimensions, segmentProperties.getComplexDimensions());
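    // the block id is the carbondata file path relative to the table's fact
    // directory (fact directory length + 1 accounts for the path separator)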
    int tableFactPathLength = CarbonStorePath.getCarbonTablePath(queryModel.getAbsoluteTableIdentifier().getStorePath(), queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier()).getFactDir().length() + 1;
    blockExecutionInfo.setBlockId(filePath.substring(tableFactPathLength));
    blockExecutionInfo.setStartBlockletIndex(startBlockletIndex);
    blockExecutionInfo.setNumberOfBlockletToScan(numberOfBlockletToScan);
    blockExecutionInfo.setQueryDimensions(currentBlockQueryDimensions.toArray(new QueryDimension[currentBlockQueryDimensions.size()]));
    // get measures present in the current block
    List<QueryMeasure> currentBlockQueryMeasures = getCurrentBlockQueryMeasures(blockExecutionInfo, queryModel, blockIndex);
    blockExecutionInfo.setQueryMeasures(currentBlockQueryMeasures.toArray(new QueryMeasure[currentBlockQueryMeasures.size()]));
    blockExecutionInfo.setDataBlock(blockIndex);
    blockExecutionInfo.setBlockKeyGenerator(blockKeyGenerator);
    // set whether this is a raw record detail query
    blockExecutionInfo.setRawRecordDetailQuery(queryModel.isForcedDetailRawQuery());
    // total number of dimension blocks
    blockExecutionInfo.setTotalNumberDimensionBlock(segmentProperties.getDimensionOrdinalToBlockMapping().size());
    blockExecutionInfo.setTotalNumberOfMeasureBlock(segmentProperties.getMeasuresOrdinalToBlockMapping().size());
    blockExecutionInfo.setAbsoluteTableIdentifier(queryModel.getAbsoluteTableIdentifier());
    blockExecutionInfo.setComplexDimensionInfoMap(QueryUtil.getComplexDimensionsMap(currentBlockQueryDimensions, segmentProperties.getDimensionOrdinalToBlockMapping(), segmentProperties.getEachComplexDimColumnValueSize(), queryProperties.columnToDictionayMapping, queryProperties.complexFilterDimension));
    IndexKey startIndexKey = null;
    IndexKey endIndexKey = null;
    if (null != queryModel.getFilterExpressionResolverTree()) {
        // loading the filter executer tree for filter evaluation
        blockExecutionInfo.setFilterExecuterTree(FilterUtil.getFilterExecuterTree(queryModel.getFilterExpressionResolverTree(), segmentProperties, blockExecutionInfo.getComlexDimensionInfoMap()));
        List<IndexKey> listOfStartEndKeys = new ArrayList<IndexKey>(2);
        FilterUtil.traverseResolverTreeAndGetStartAndEndKey(segmentProperties, queryModel.getFilterExpressionResolverTree(), listOfStartEndKeys);
        startIndexKey = listOfStartEndKeys.get(0);
        endIndexKey = listOfStartEndKeys.get(1);
    } else {
        try {
            startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
            endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
        } catch (KeyGenException e) {
            throw new QueryExecutionException(e);
        }
    }
    //setting the start index key of the block node
    blockExecutionInfo.setStartKey(startIndexKey);
    //setting the end index key of the block node
    blockExecutionInfo.setEndKey(endIndexKey);
    // expression dimensions
    List<CarbonDimension> expressionDimensions = new ArrayList<CarbonDimension>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // expression measure
    List<CarbonMeasure> expressionMeasures = new ArrayList<CarbonMeasure>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // setting all the dimension chunk indexes to be read from file
    int numberOfElementToConsider = 0;
    // list of dimensions to be projected
    Set<Integer> allProjectionListDimensionIdexes = new LinkedHashSet<>();
    // create a list of filter dimensions present in the current block
    Set<CarbonDimension> currentBlockFilterDimensions = getCurrentBlockFilterDimensions(queryProperties.complexFilterDimension, segmentProperties);
    int[] dimensionsBlockIndexes = QueryUtil.getDimensionsBlockIndexes(currentBlockQueryDimensions, segmentProperties.getDimensionOrdinalToBlockMapping(), expressionDimensions, currentBlockFilterDimensions, allProjectionListDimensionIdexes);
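    // number of columns whose data is grouped together in a single IO read,
    // taken from the carbon property (with a built-in default)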
    int numberOfColumnToBeReadInOneIO = Integer.parseInt(CarbonProperties.getInstance().getProperty(CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO, CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE));
    if (dimensionsBlockIndexes.length > 0) {
        numberOfElementToConsider = dimensionsBlockIndexes[dimensionsBlockIndexes.length - 1] == segmentProperties.getBlockTodimensionOrdinalMapping().size() - 1 ? dimensionsBlockIndexes.length - 1 : dimensionsBlockIndexes.length;
        blockExecutionInfo.setAllSelectedDimensionBlocksIndexes(CarbonUtil.getRangeIndex(dimensionsBlockIndexes, numberOfElementToConsider, numberOfColumnToBeReadInOneIO));
    } else {
        blockExecutionInfo.setAllSelectedDimensionBlocksIndexes(new int[0][0]);
    }
    // get the list of updated filter measures present in the current block
    Set<CarbonMeasure> currentBlockFilterMeasures = getCurrentBlockFilterMeasures(queryProperties.filterMeasures, segmentProperties);
    // list of measures to be projected
    List<Integer> allProjectionListMeasureIndexes = new ArrayList<>();
    int[] measureBlockIndexes = QueryUtil.getMeasureBlockIndexes(currentBlockQueryMeasures, expressionMeasures, segmentProperties.getMeasuresOrdinalToBlockMapping(), currentBlockFilterMeasures, allProjectionListMeasureIndexes);
    if (measureBlockIndexes.length > 0) {
        numberOfElementToConsider = measureBlockIndexes[measureBlockIndexes.length - 1] == segmentProperties.getMeasures().size() - 1 ? measureBlockIndexes.length - 1 : measureBlockIndexes.length;
        // setting all the measure chunk indexes to be read from file
        blockExecutionInfo.setAllSelectedMeasureBlocksIndexes(CarbonUtil.getRangeIndex(measureBlockIndexes, numberOfElementToConsider, numberOfColumnToBeReadInOneIO));
    } else {
        blockExecutionInfo.setAllSelectedMeasureBlocksIndexes(new int[0][0]);
    }
    // set the indexes of the dimensions in the projection list
    blockExecutionInfo.setProjectionListDimensionIndexes(ArrayUtils.toPrimitive(allProjectionListDimensionIdexes.toArray(new Integer[allProjectionListDimensionIdexes.size()])));
    // set the indexes of the measures in the projection list
    blockExecutionInfo.setProjectionListMeasureIndexes(ArrayUtils.toPrimitive(allProjectionListMeasureIndexes.toArray(new Integer[allProjectionListMeasureIndexes.size()])));
    // setting the size of fixed key column (dictionary column)
    blockExecutionInfo.setFixedLengthKeySize(getKeySize(currentBlockQueryDimensions, segmentProperties));
    Set<Integer> dictionaryColumnBlockIndex = new HashSet<Integer>();
    List<Integer> noDictionaryColumnBlockIndex = new ArrayList<Integer>();
    // get the block index to be read from file for query dimension
    // for both dictionary columns and no dictionary columns
    QueryUtil.fillQueryDimensionsBlockIndexes(currentBlockQueryDimensions, segmentProperties.getDimensionOrdinalToBlockMapping(), dictionaryColumnBlockIndex, noDictionaryColumnBlockIndex);
    int[] queryDictionaryColumnBlockIndexes = ArrayUtils.toPrimitive(dictionaryColumnBlockIndex.toArray(new Integer[dictionaryColumnBlockIndex.size()]));
    // the dictionary column block indexes must be sorted because, for all
    // dimension columns, the key is filled based on key order
    Arrays.sort(queryDictionaryColumnBlockIndexes);
    blockExecutionInfo.setDictionaryColumnBlockIndex(queryDictionaryColumnBlockIndexes);
    // setting the no dictionary column block indexes
    blockExecutionInfo.setNoDictionaryBlockIndexes(ArrayUtils.toPrimitive(noDictionaryColumnBlockIndex.toArray(new Integer[noDictionaryColumnBlockIndex.size()])));
    // setting column id to dictionary mapping
    blockExecutionInfo.setColumnIdToDcitionaryMapping(queryProperties.columnToDictionayMapping);
    // setting each column value size
    blockExecutionInfo.setEachColumnValueSize(segmentProperties.getEachDimColumnValueSize());
    blockExecutionInfo.setComplexColumnParentBlockIndexes(getComplexDimensionParentBlockIndexes(currentBlockQueryDimensions));
    blockExecutionInfo.setVectorBatchCollector(queryModel.isVectorReader());
    try {
        // set the column group and its key structure info, which is used
        // to get the column group column data for the final row and for
        // dimension aggregation
        blockExecutionInfo.setColumnGroupToKeyStructureInfo(QueryUtil.getColumnGroupKeyStructureInfo(currentBlockQueryDimensions, segmentProperties));
    } catch (KeyGenException e) {
        throw new QueryExecutionException(e);
    }
    // set actual query dimensions and measures. It may differ in case of restructure scenarios
    blockExecutionInfo.setActualQueryDimensions(queryModel.getQueryDimension().toArray(new QueryDimension[queryModel.getQueryDimension().size()]));
    blockExecutionInfo.setActualQueryMeasures(queryModel.getQueryMeasures().toArray(new QueryMeasure[queryModel.getQueryMeasures().size()]));
    return blockExecutionInfo;
}
Also used : LinkedHashSet(java.util.LinkedHashSet) IndexKey(org.apache.carbondata.core.datastore.IndexKey) ArrayList(java.util.ArrayList) QueryMeasure(org.apache.carbondata.core.scan.model.QueryMeasure) BlockExecutionInfo(org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo) KeyGenerator(org.apache.carbondata.core.keygenerator.KeyGenerator) HashSet(java.util.HashSet) CarbonDimension(org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension) QueryExecutionException(org.apache.carbondata.core.scan.executor.exception.QueryExecutionException) CarbonMeasure(org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure) KeyGenException(org.apache.carbondata.core.keygenerator.KeyGenException) SegmentProperties(org.apache.carbondata.core.datastore.block.SegmentProperties) QueryDimension(org.apache.carbondata.core.scan.model.QueryDimension)
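
The method above acts as a builder: everything the scan needs for one block is collected into a single BlockExecutionInfo, and any checked failure along the way (such as key generation) surfaces as QueryExecutionException. A minimal caller sketch, assuming a hypothetical subclass of AbstractQueryExecutor; the getBlockExecutionInfos name and the blockletCounts input are illustrative, not CarbonData API:

protected List<BlockExecutionInfo> getBlockExecutionInfos(QueryModel queryModel, List<AbstractIndex> blockIndexes, List<String> filePaths, int[] blockletCounts) throws QueryExecutionException {
    List<BlockExecutionInfo> infos = new ArrayList<>(blockIndexes.size());
    for (int i = 0; i < blockIndexes.size(); i++) {
        // scan each block from its first blocklet; any QueryExecutionException
        // thrown by getBlockExecutionInfoForBlock propagates to the caller
        infos.add(getBlockExecutionInfoForBlock(queryModel, blockIndexes.get(i), 0, blockletCounts[i], filePaths.get(i)));
    }
    return infos;
}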

Example 2 with QueryExecutionException

Use of org.apache.carbondata.core.scan.executor.exception.QueryExecutionException in project carbondata by apache.

The class CarbondataRecordSet, method cursor:

/**
   * get data blocks via Carbondata QueryModel API
   */
@Override
public RecordCursor cursor() {
    List<TableBlockInfo> tableBlockInfoList = new ArrayList<TableBlockInfo>();
    tableBlockInfoList.add(new TableBlockInfo(split.getLocalInputSplit().getPath().toString(),
        split.getLocalInputSplit().getStart(), split.getLocalInputSplit().getSegmentId(),
        split.getLocalInputSplit().getLocations().toArray(new String[0]),
        split.getLocalInputSplit().getLength(), //blockletInfos,
        ColumnarFormatVersion.valueOf(split.getLocalInputSplit().getVersion())));
    queryModel.setTableBlockInfos(tableBlockInfoList);
    queryExecutor = QueryExecutorFactory.getQueryExecutor(queryModel);
    //queryModel.setQueryId(queryModel.getQueryId() + "_" + split.getLocalInputSplit().getSegmentId());
    try {
        readSupport.initialize(queryModel.getProjectionColumns(), queryModel.getAbsoluteTableIdentifier());
        CarbonIterator<Object[]> carbonIterator = new ChunkRowIterator((CarbonIterator<BatchResult>) queryExecutor.execute(queryModel));
        RecordCursor rc = new CarbondataRecordCursor(readSupport, carbonIterator, columns, split);
        return rc;
    } catch (QueryExecutionException e) {
        throw new RuntimeException(e.getMessage(), e);
    } catch (Exception ex) {
        throw new RuntimeException(ex.getMessage(), ex);
    }
}
Also used : TableBlockInfo(org.apache.carbondata.core.datastore.block.TableBlockInfo) ChunkRowIterator(org.apache.carbondata.core.scan.result.iterator.ChunkRowIterator) QueryExecutionException(org.apache.carbondata.core.scan.executor.exception.QueryExecutionException) ArrayList(java.util.ArrayList) BatchResult(org.apache.carbondata.core.scan.result.BatchResult)
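
Example 2 wires the executor into a Presto RecordCursor and defers cleanup to the cursor. A minimal end-to-end sketch of the same execute-and-iterate pattern, assuming a queryExecutor and queryModel as above (the countRows method name is illustrative): execute the query, drain the row iterator, and always release executor resources via finish(), wrapping the checked QueryExecutionException the same way Examples 4 and 5 do:

public long countRows(QueryExecutor queryExecutor, QueryModel queryModel) throws IOException {
    long rowCount = 0;
    try {
        // execute() produces result batches; ChunkRowIterator flattens them into rows
        CarbonIterator<Object[]> rows = new ChunkRowIterator((CarbonIterator<BatchResult>) queryExecutor.execute(queryModel));
        while (rows.hasNext()) {
            // each element is one projected row as Object[]
            rows.next();
            rowCount++;
        }
        return rowCount;
    } catch (QueryExecutionException e) {
        throw new IOException(e);
    } finally {
        try {
            // release scan resources even on failure, as Examples 4 and 5 do
            queryExecutor.finish();
        } catch (QueryExecutionException e) {
            throw new IOException(e);
        }
    }
}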

Example 3 with QueryExecutionException

Use of org.apache.carbondata.core.scan.executor.exception.QueryExecutionException in project carbondata by apache.

The class CarbonHiveRecordReader, method initialize:

public void initialize(InputSplit inputSplit, Configuration conf) throws IOException {
    // The input split can contain single HDFS block or multiple blocks, so firstly get all the
    // blocks and then set them in the query model.
    List<CarbonHiveInputSplit> splitList;
    if (inputSplit instanceof CarbonHiveInputSplit) {
        splitList = new ArrayList<>(1);
        splitList.add((CarbonHiveInputSplit) inputSplit);
    } else {
        throw new RuntimeException("unsupported input split type: " + inputSplit);
    }
    List<TableBlockInfo> tableBlockInfoList = CarbonHiveInputSplit.createBlocks(splitList);
    queryModel.setTableBlockInfos(tableBlockInfoList);
    readSupport.initialize(queryModel.getProjectionColumns(), queryModel.getAbsoluteTableIdentifier());
    try {
        carbonIterator = new ChunkRowIterator(queryExecutor.execute(queryModel));
    } catch (QueryExecutionException e) {
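        // wrap into the IOException required by the reader contract; note that only
        // the message and the root cause are kept, not the QueryExecutionException itself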
        throw new IOException(e.getMessage(), e.getCause());
    }
    if (valueObj == null) {
        valueObj = new ArrayWritable(Writable.class, new Writable[queryModel.getProjectionColumns().length]);
    }
    final TypeInfo rowTypeInfo;
    final List<String> columnNames;
    List<TypeInfo> columnTypes;
    // Get column names and sort order
    final String colIds = conf.get("hive.io.file.readcolumn.ids");
    final String columnNameProperty = conf.get("hive.io.file.readcolumn.names");
    final String columnTypeProperty = conf.get(serdeConstants.LIST_COLUMN_TYPES);
    if (columnNameProperty.length() == 0) {
        columnNames = new ArrayList<String>();
    } else {
        columnNames = Arrays.asList(columnNameProperty.split(","));
    }
    if (columnTypeProperty.length() == 0) {
        columnTypes = new ArrayList<TypeInfo>();
    } else {
        columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
    }
    String[] arraySelectedColId = colIds.split(",");
    List<TypeInfo> reqColTypes = new ArrayList<TypeInfo>();
    for (String anArrayColId : arraySelectedColId) {
        reqColTypes.add(columnTypes.get(Integer.parseInt(anArrayColId)));
    }
    // Create row related objects
    rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, reqColTypes);
    this.objInspector = new CarbonObjectInspector((StructTypeInfo) rowTypeInfo);
}
Also used : TableBlockInfo(org.apache.carbondata.core.datastore.block.TableBlockInfo) ChunkRowIterator(org.apache.carbondata.core.scan.result.iterator.ChunkRowIterator) ArrayList(java.util.ArrayList) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) IOException(java.io.IOException) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) QueryExecutionException(org.apache.carbondata.core.scan.executor.exception.QueryExecutionException)
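
The three Hive properties drive the projection: the column ids index into the full table-type list, while the names property already contains only the projected columns. A hypothetical configuration for the reader above, for a three-column table of which only the first two are read:

Configuration conf = new Configuration();
// positions of the projected columns within the table schema
conf.set("hive.io.file.readcolumn.ids", "0,1");
// names of the projected columns only
conf.set("hive.io.file.readcolumn.names", "id,name");
// types of ALL table columns, in schema order
conf.set(serdeConstants.LIST_COLUMN_TYPES, "int,string,double");
// initialize(...) then builds the row type struct<id:int,name:string>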

Example 4 with QueryExecutionException

Use of org.apache.carbondata.core.scan.executor.exception.QueryExecutionException in project carbondata by apache.

The class VectorizedCarbonRecordReader, method close:

@Override
public void close() throws IOException {
    logStatistics(rowCount, queryModel.getStatisticsRecorder());
    if (columnarBatch != null) {
        columnarBatch.close();
        columnarBatch = null;
    }
    // clear dictionary cache
    Map<String, Dictionary> columnToDictionaryMapping = queryModel.getColumnToDictionaryMapping();
    if (null != columnToDictionaryMapping) {
        for (Map.Entry<String, Dictionary> entry : columnToDictionaryMapping.entrySet()) {
            CarbonUtil.clearDictionaryCache(entry.getValue());
        }
    }
    try {
        queryExecutor.finish();
    } catch (QueryExecutionException e) {
        throw new IOException(e);
    }
}
Also used : Dictionary(org.apache.carbondata.core.cache.dictionary.Dictionary) QueryExecutionException(org.apache.carbondata.core.scan.executor.exception.QueryExecutionException) IOException(java.io.IOException) Map(java.util.Map)

Example 5 with QueryExecutionException

Use of org.apache.carbondata.core.scan.executor.exception.QueryExecutionException in project carbondata by apache.

The class CarbonRecordReader, method close:

@Override
public void close() throws IOException {
    logStatistics(rowCount, queryModel.getStatisticsRecorder());
    // clear dictionary cache
    Map<String, Dictionary> columnToDictionaryMapping = queryModel.getColumnToDictionaryMapping();
    if (null != columnToDictionaryMapping) {
        for (Map.Entry<String, Dictionary> entry : columnToDictionaryMapping.entrySet()) {
            CarbonUtil.clearDictionaryCache(entry.getValue());
        }
    }
    // close read support
    readSupport.close();
    try {
        queryExecutor.finish();
    } catch (QueryExecutionException e) {
        throw new IOException(e);
    }
}
Also used : Dictionary(org.apache.carbondata.core.cache.dictionary.Dictionary) QueryExecutionException(org.apache.carbondata.core.scan.executor.exception.QueryExecutionException) IOException(java.io.IOException) Map(java.util.Map)
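
Examples 4 and 5 both translate the checked QueryExecutionException from finish() into IOException, which keeps close() compatible with the standard java.io.Closeable contract. A usage sketch under that assumption; createReader is a hypothetical factory, not CarbonData API:

void readAll() throws IOException, InterruptedException {
    // createReader() is a hypothetical factory returning a fully configured reader;
    // try-with-resources guarantees close() runs, which in turn calls queryExecutor.finish()
    try (CarbonRecordReader<Object[]> reader = createReader()) {
        while (reader.nextKeyValue()) {
            Object[] row = reader.getCurrentValue();
            // process one row here
        }
    }
}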

Aggregations

QueryExecutionException (org.apache.carbondata.core.scan.executor.exception.QueryExecutionException) 6
IOException (java.io.IOException) 3
ArrayList (java.util.ArrayList) 3
TableBlockInfo (org.apache.carbondata.core.datastore.block.TableBlockInfo) 3
ChunkRowIterator (org.apache.carbondata.core.scan.result.iterator.ChunkRowIterator) 3
Map (java.util.Map) 2
Dictionary (org.apache.carbondata.core.cache.dictionary.Dictionary) 2
HashSet (java.util.HashSet) 1
LinkedHashSet (java.util.LinkedHashSet) 1
IndexKey (org.apache.carbondata.core.datastore.IndexKey) 1
SegmentProperties (org.apache.carbondata.core.datastore.block.SegmentProperties) 1
KeyGenException (org.apache.carbondata.core.keygenerator.KeyGenException) 1
KeyGenerator (org.apache.carbondata.core.keygenerator.KeyGenerator) 1
CarbonDimension (org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension) 1
CarbonMeasure (org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure) 1
BlockExecutionInfo (org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo) 1
QueryDimension (org.apache.carbondata.core.scan.model.QueryDimension) 1
QueryMeasure (org.apache.carbondata.core.scan.model.QueryMeasure) 1
BatchResult (org.apache.carbondata.core.scan.result.BatchResult) 1
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable) 1