Search in sources :

Example 1 with DataType

use of org.apache.carbondata.core.metadata.datatype.DataType in project carbondata by apache.

The class CarbonUtil, method getValueCompressionModel.

/**
   * Below method will be used to get the value compression model of the
   * measure data chunk
   *
   * @return value compression model
   */
/**
 * Builds the write-path value compression model for a measure data chunk
 * from the per-measure encoder metadata.
 *
 * @param encodeMetaList one {@link ValueEncoderMeta} entry per measure column
 * @return value compression model derived from the supplied metadata
 */
public static WriterCompressModel getValueCompressionModel(List<ValueEncoderMeta> encodeMetaList) {
    int measureCount = encodeMetaList.size();
    Object[] maxValues = new Object[measureCount];
    Object[] minValues = new Object[measureCount];
    Object[] uniqueValues = new Object[measureCount];
    int[] decimals = new int[measureCount];
    DataType[] dataTypes = new DataType[measureCount];
    byte[] selectedDataTypes = new byte[measureCount];
    // Flatten each metadata entry into the parallel arrays the model expects.
    for (int index = 0; index < measureCount; index++) {
        ValueEncoderMeta meta = encodeMetaList.get(index);
        maxValues[index] = meta.getMaxValue();
        minValues[index] = meta.getMinValue();
        uniqueValues[index] = meta.getUniqueValue();
        decimals[index] = meta.getDecimal();
        dataTypes[index] = meta.getType();
        selectedDataTypes[index] = meta.getDataTypeSelected();
    }
    MeasureMetaDataModel model = new MeasureMetaDataModel(minValues, maxValues, decimals,
        measureCount, uniqueValues, dataTypes, selectedDataTypes);
    return ValueCompressionUtil.getWriterCompressModel(model);
}
Also used : MeasureMetaDataModel(org.apache.carbondata.core.datastore.compression.MeasureMetaDataModel) DataType(org.apache.carbondata.core.metadata.datatype.DataType) ValueEncoderMeta(org.apache.carbondata.core.metadata.ValueEncoderMeta)

Example 2 with DataType

use of org.apache.carbondata.core.metadata.datatype.DataType in project carbondata by apache.

The class AbstractQueryExecutor, method initQuery.

/**
   * Below method will be used to fill the executor properties based on query
   * model it will parse the query model and get the detail and fill it in
   * query properties
   *
   * @param queryModel
   */
/**
   * Below method will be used to fill the executor properties based on query
   * model it will parse the query model and get the detail and fill it in
   * query properties
   *
   * @param queryModel query model describing the table, blocks, dimensions
   *                   and measures of the query to execute
   * @throws IOException if loading the table blocks from the cache fails
   */
protected void initQuery(QueryModel queryModel) throws IOException {
    // tag the current thread with partition id / query id for log correlation
    StandardLogService.setThreadName(StandardLogService.getPartitionID(queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier().getTableName()), queryModel.getQueryId());
    LOGGER.info("Query will be executed on table: " + queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier().getTableName());
    // add executor service for query execution
    queryProperties.executorService = Executors.newCachedThreadPool();
    // Initializing statistics list to record the query statistics
    // creating copy on write to handle concurrent scenario
    queryProperties.queryStatisticsRecorder = CarbonTimeStatisticsFactory.createExecutorRecorder(queryModel.getQueryId());
    queryModel.setStatisticsRecorder(queryProperties.queryStatisticsRecorder);
    QueryUtil.resolveQueryModel(queryModel);
    QueryStatistic queryStatistic = new QueryStatistic();
    // sort the block info
    // so block will be loaded in sorted order this will be required for
    // query execution
    Collections.sort(queryModel.getTableBlockInfos());
    // get the table blocks
    CacheProvider cacheProvider = CacheProvider.getInstance();
    BlockIndexStore<TableBlockUniqueIdentifier, AbstractIndex> cache = (BlockIndexStore) cacheProvider.createCache(CacheType.EXECUTOR_BTREE, queryModel.getTable().getStorePath());
    // remove the invalid table blocks, block which is deleted or compacted
    cache.removeTableBlocks(queryModel.getInvalidSegmentIds(), queryModel.getAbsoluteTableIdentifier());
    List<TableBlockUniqueIdentifier> tableBlockUniqueIdentifiers = prepareTableBlockUniqueIdentifier(queryModel.getTableBlockInfos(), queryModel.getAbsoluteTableIdentifier());
    // evict cached blocks invalidated by a horizontal compaction before loading
    cache.removeTableBlocksIfHorizontalCompactionDone(queryModel);
    queryProperties.dataBlocks = cache.getAll(tableBlockUniqueIdentifiers);
    // record how long loading the executor blocks took
    queryStatistic.addStatistics(QueryStatisticsConstants.LOAD_BLOCKS_EXECUTOR, System.currentTimeMillis());
    queryProperties.queryStatisticsRecorder.recordStatistics(queryStatistic);
    // calculating the total number of aggregated columns
    int aggTypeCount = queryModel.getQueryMeasures().size();
    int currentIndex = 0;
    DataType[] dataTypes = new DataType[aggTypeCount];
    for (QueryMeasure carbonMeasure : queryModel.getQueryMeasures()) {
        // adding the data type and aggregation type of all the measure this
        // can be used
        // to select the aggregator
        dataTypes[currentIndex] = carbonMeasure.getMeasure().getDataType();
        currentIndex++;
    }
    queryProperties.measureDataTypes = dataTypes;
    // as aggregation will be executed in following order
    // 1.aggregate dimension expression
    // 2. expression
    // 3. query measure
    // so calculating the index of the expression start index
    // and measure column start index
    queryProperties.filterMeasures = new HashSet<>();
    queryProperties.complexFilterDimension = new HashSet<>();
    // collect the dimensions/measures referenced by the filter tree
    QueryUtil.getAllFilterDimensions(queryModel.getFilterExpressionResolverTree(), queryProperties.complexFilterDimension, queryProperties.filterMeasures);
    queryStatistic = new QueryStatistic();
    // dictionary column unique column id to dictionary mapping
    // which will be used to get column actual data
    queryProperties.columnToDictionayMapping = QueryUtil.getDimensionDictionaryDetail(queryModel.getQueryDimension(), queryProperties.complexFilterDimension, queryModel.getAbsoluteTableIdentifier());
    // record how long loading the dictionaries took
    queryStatistic.addStatistics(QueryStatisticsConstants.LOAD_DICTIONARY, System.currentTimeMillis());
    queryProperties.queryStatisticsRecorder.recordStatistics(queryStatistic);
    queryModel.setColumnToDictionaryMapping(queryProperties.columnToDictionayMapping);
}
Also used : BlockIndexStore(org.apache.carbondata.core.datastore.BlockIndexStore) QueryMeasure(org.apache.carbondata.core.scan.model.QueryMeasure) TableBlockUniqueIdentifier(org.apache.carbondata.core.datastore.block.TableBlockUniqueIdentifier) AbstractIndex(org.apache.carbondata.core.datastore.block.AbstractIndex) DataType(org.apache.carbondata.core.metadata.datatype.DataType) CacheProvider(org.apache.carbondata.core.cache.CacheProvider) QueryStatistic(org.apache.carbondata.core.stats.QueryStatistic)

Example 3 with DataType

use of org.apache.carbondata.core.metadata.datatype.DataType in project carbondata by apache.

The class ValueCompressionUtil, method getWriterCompressModel.

/**
   * Create Value compression model for write path
   */
/**
 * Create the value compression model for the write path: for every measure,
 * a {@link CompressionFinder} is resolved from its min/max values, mantissa
 * and data type, and the per-measure metadata is copied into the model.
 *
 * @param measureMDMdl aggregated measure metadata (parallel arrays per measure)
 * @return writer compression model populated with compression finders and holders
 */
public static WriterCompressModel getWriterCompressModel(MeasureMetaDataModel measureMDMdl) {
    int count = measureMDMdl.getMeasureCount();
    Object[] minValues = measureMDMdl.getMinValue();
    Object[] maxValues = measureMDMdl.getMaxValue();
    Object[] uniqueValues = measureMDMdl.getUniqueValue();
    int[] mantissas = measureMDMdl.getMantissa();
    DataType[] storeTypes = measureMDMdl.getType();
    byte[] selectedTypes = measureMDMdl.getDataTypeSelected();
    CompressionFinder[] finders = new CompressionFinder[count];
    DataType[] actualTypes = new DataType[count];
    DataType[] convertedTypes = new DataType[count];
    // Resolve, per measure, how its values will be compressed.
    for (int index = 0; index < count; index++) {
        CompressionFinder finder = ValueCompressionUtil.getCompressionFinder(
            maxValues[index], minValues[index], mantissas[index], storeTypes[index], selectedTypes[index]);
        finders[index] = finder;
        actualTypes[index] = finder.getActualDataType();
        convertedTypes[index] = finder.getConvertedDataType();
    }
    WriterCompressModel model = new WriterCompressModel();
    model.setCompressionFinders(finders);
    model.setMaxValue(maxValues);
    model.setMantissa(mantissas);
    model.setConvertedDataType(convertedTypes);
    model.setActualDataType(actualTypes);
    model.setMinValue(minValues);
    model.setUniqueValue(uniqueValues);
    model.setType(storeTypes);
    model.setDataTypeSelected(selectedTypes);
    ValueCompressionHolder[] holders = ValueCompressionUtil.getValueCompressionHolder(finders);
    model.setValueCompressionHolder(holders);
    return model;
}
Also used : WriterCompressModel(org.apache.carbondata.core.datastore.compression.WriterCompressModel) ValueCompressionHolder(org.apache.carbondata.core.datastore.compression.ValueCompressionHolder) DataType(org.apache.carbondata.core.metadata.datatype.DataType)

Example 4 with DataType

use of org.apache.carbondata.core.metadata.datatype.DataType in project carbondata by apache.

The class ValueCompressionUtil, method getLongCompressorFinder.

/**
 * Selects the compression strategy for a long measure by comparing the
 * storage size needed for the raw max value (adaptive) against the size
 * needed for the delta (max - min).
 */
private static CompressionFinder getLongCompressorFinder(Object maxValue, Object minValue, int mantissa, byte dataTypeSelected, DataType measureStoreType) {
    long max = (long) maxValue;
    DataType adaptiveDataType = getDataType(max, mantissa, dataTypeSelected);
    // When the adaptive type is already LONG the delta cannot shrink the
    // storage further, so it is not possible to determine a smaller
    // compression type — keep LONG for the delta as well.
    DataType deltaDataType;
    if (DataType.LONG == adaptiveDataType) {
        deltaDataType = DataType.LONG;
    } else {
        deltaDataType = getDataType(max - (long) minValue, mantissa, dataTypeSelected);
    }
    int adaptiveSize = getSize(adaptiveDataType);
    int deltaSize = getSize(deltaDataType);
    if (deltaSize < adaptiveSize) {
        // Delta encoding stores values in a strictly smaller type.
        return new CompressionFinder(COMPRESSION_TYPE.DELTA_DOUBLE, DataType.LONG, deltaDataType, measureStoreType);
    } else if (deltaSize > adaptiveSize) {
        // NOTE(review): this branch picks ADAPTIVE yet passes deltaDataType
        // (not adaptiveDataType) — preserved as in the original; confirm intent.
        return new CompressionFinder(COMPRESSION_TYPE.ADAPTIVE, DataType.LONG, deltaDataType, measureStoreType);
    } else {
        return new CompressionFinder(COMPRESSION_TYPE.ADAPTIVE, DataType.LONG, adaptiveDataType, measureStoreType);
    }
}
Also used : DataType(org.apache.carbondata.core.metadata.datatype.DataType)

Example 5 with DataType

use of org.apache.carbondata.core.metadata.datatype.DataType in project carbondata by apache.

The class ExpressionResult, method equals.

/**
 * Equality check that promotes both values to the data type with the
 * higher precedence before comparing (e.g. LONG over INT, since the
 * lower-precedence value can always be widened while the reverse
 * conversion would throw). A failed conversion is treated as "not equal".
 *
 * NOTE(review): hashCode is not visible in this snippet — verify it stays
 * consistent with this equals implementation.
 */
@Override
public boolean equals(Object obj) {
    if (!(obj instanceof ExpressionResult)) {
        return false;
    }
    if (this == obj) {
        return true;
    }
    if (getClass() != obj.getClass()) {
        return false;
    }
    ExpressionResult other = (ExpressionResult) obj;
    // Identity shortcut; also covers both wrapped values being null.
    if (this.value == other.value) {
        return true;
    }
    if (this.isNull() || other.isNull()) {
        return false;
    }
    // Compare using whichever side's data type has the higher precedence.
    DataType comparisonType;
    if (this.getDataType().getPrecedenceOrder() > other.getDataType().getPrecedenceOrder()) {
        comparisonType = this.getDataType();
    } else {
        comparisonType = other.getDataType();
    }
    try {
        switch(comparisonType) {
            case STRING:
                return this.getString().equals(other.getString());
            case SHORT:
                return this.getShort().equals(other.getShort());
            case INT:
                return this.getInt().equals(other.getInt());
            case LONG:
            case DATE:
            case TIMESTAMP:
                return this.getLong().equals(other.getLong());
            case DOUBLE:
                return this.getDouble().equals(other.getDouble());
            case DECIMAL:
                return this.getDecimal().equals(other.getDecimal());
            default:
                return false;
        }
    } catch (FilterIllegalMemberException ex) {
        // Value could not be converted to the comparison type.
        return false;
    }
}
Also used : DataType(org.apache.carbondata.core.metadata.datatype.DataType) FilterIllegalMemberException(org.apache.carbondata.core.scan.expression.exception.FilterIllegalMemberException)

Aggregations

DataType (org.apache.carbondata.core.metadata.datatype.DataType)27 MeasureMetaDataModel (org.apache.carbondata.core.datastore.compression.MeasureMetaDataModel)9 WriterCompressModel (org.apache.carbondata.core.datastore.compression.WriterCompressModel)9 Test (org.junit.Test)8 IOException (java.io.IOException)3 List (java.util.List)3 Domain (com.facebook.presto.spi.predicate.Domain)2 Range (com.facebook.presto.spi.predicate.Range)2 TupleDomain (com.facebook.presto.spi.predicate.TupleDomain)2 ImmutableList (com.google.common.collect.ImmutableList)2 DataOutputStream (java.io.DataOutputStream)2 BigDecimal (java.math.BigDecimal)2 ArrayList (java.util.ArrayList)2 BitSet (java.util.BitSet)2 Comparator (java.util.Comparator)2 CarbonTable (org.apache.carbondata.core.metadata.schema.table.CarbonTable)2 ColumnExpression (org.apache.carbondata.core.scan.expression.ColumnExpression)2 Expression (org.apache.carbondata.core.scan.expression.Expression)2 LiteralExpression (org.apache.carbondata.core.scan.expression.LiteralExpression)2 AndExpression (org.apache.carbondata.core.scan.expression.logical.AndExpression)2