Use of org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure in project carbondata by apache.
The class CarbonCompactionUtil, method updateColumnSchemaAndGetCardinality:
/**
 * This method will return the updated cardinality according to the master schema.
 *
 * @param columnCardinalityMap map from column id to the cardinality recorded for that column
 * @param carbonTable table whose master schema drives the update
 * @param updatedColumnSchemaList output list populated with the master schema's column schemas
 * @return cardinality array aligned with the master schema's dimension order
 */
public static int[] updateColumnSchemaAndGetCardinality(Map<String, Integer> columnCardinalityMap,
    CarbonTable carbonTable, List<ColumnSchema> updatedColumnSchemaList) {
  List<CarbonDimension> masterDimensions =
      carbonTable.getDimensionByTableName(carbonTable.getTableName());
  List<Integer> updatedCardinalityList = new ArrayList<>(columnCardinalityMap.size());
  for (CarbonDimension dimension : masterDimensions) {
    Integer value = columnCardinalityMap.get(dimension.getColumnId());
    if (null == value) {
      updatedCardinalityList.add(getDimensionDefaultCardinality(dimension));
    } else {
      updatedCardinalityList.add(value);
    }
    updatedColumnSchemaList.add(dimension.getColumnSchema());
  }
  // add measures to the column schema list
  List<CarbonMeasure> masterSchemaMeasures =
      carbonTable.getMeasureByTableName(carbonTable.getTableName());
  for (CarbonMeasure measure : masterSchemaMeasures) {
    updatedColumnSchemaList.add(measure.getColumnSchema());
  }
  return ArrayUtils.toPrimitive(
      updatedCardinalityList.toArray(new Integer[updatedCardinalityList.size()]));
}
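As a standalone illustration of the merge pattern above (look up each master-schema column in the cardinality map, fall back to a default when the column is missing, then convert to a primitive array), here is a minimal sketch. The column ids, the DEFAULT_CARDINALITY constant, and the sample data are hypothetical stand-ins, and org.apache.commons.lang3.ArrayUtils is assumed on the classpath.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.ArrayUtils;

public class CardinalityMergeSketch {

  // hypothetical default used when a column has no recorded cardinality
  private static final int DEFAULT_CARDINALITY = Integer.MAX_VALUE;

  public static void main(String[] args) {
    // cardinalities recorded for an older block; "city" was added to the schema later
    Map<String, Integer> columnCardinalityMap = new HashMap<>();
    columnCardinalityMap.put("id", 1000);
    columnCardinalityMap.put("name", 500);

    // master schema column ids, in schema order
    List<String> masterColumns = Arrays.asList("id", "name", "city");

    List<Integer> updatedCardinalityList = new ArrayList<>(masterColumns.size());
    for (String columnId : masterColumns) {
      Integer value = columnCardinalityMap.get(columnId);
      // fall back to a default for columns the old block never saw
      updatedCardinalityList.add(value == null ? DEFAULT_CARDINALITY : value);
    }

    int[] cardinality = ArrayUtils.toPrimitive(
        updatedCardinalityList.toArray(new Integer[updatedCardinalityList.size()]));
    System.out.println(Arrays.toString(cardinality)); // [1000, 500, 2147483647]
  }
}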
Use of org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure in project carbondata by apache.
The class BlockletDataMap, method updateMinValues:
/**
 * Fill the measures' min values with type-appropriate minimums; this is needed for backward
 * version compatibility, as older versions don't store min values for measures.
 */
private byte[][] updateMinValues(byte[][] minValues, int[] minMaxLen) {
  byte[][] updatedValues = minValues;
  if (minValues.length < minMaxLen.length) {
    updatedValues = new byte[minMaxLen.length][];
    System.arraycopy(minValues, 0, updatedValues, 0, minValues.length);
    List<CarbonMeasure> measures = segmentProperties.getMeasures();
    ByteBuffer buffer = ByteBuffer.allocate(8);
    for (int i = 0; i < measures.size(); i++) {
      buffer.rewind();
      DataType dataType = measures.get(i).getDataType();
      if (dataType == DataTypes.BYTE) {
        buffer.putLong(Byte.MIN_VALUE);
        updatedValues[minValues.length + i] = buffer.array().clone();
      } else if (dataType == DataTypes.SHORT) {
        buffer.putLong(Short.MIN_VALUE);
        updatedValues[minValues.length + i] = buffer.array().clone();
      } else if (dataType == DataTypes.INT) {
        buffer.putLong(Integer.MIN_VALUE);
        updatedValues[minValues.length + i] = buffer.array().clone();
      } else if (dataType == DataTypes.LONG) {
        buffer.putLong(Long.MIN_VALUE);
        updatedValues[minValues.length + i] = buffer.array().clone();
      } else if (DataTypes.isDecimal(dataType)) {
        updatedValues[minValues.length + i] =
            DataTypeUtil.bigDecimalToByte(BigDecimal.valueOf(Long.MIN_VALUE));
      } else {
        buffer.putDouble(Double.MIN_VALUE);
        updatedValues[minValues.length + i] = buffer.array().clone();
      }
    }
  }
  return updatedValues;
}
Use of org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure in project carbondata by apache.
The class BlockletDataMap, method updateMaxValues:
/**
 * Fill the measures' max values with type-appropriate maximums; this is needed for backward
 * version compatibility, as older versions don't store max values for measures.
 */
private byte[][] updateMaxValues(byte[][] maxValues, int[] minMaxLen) {
  byte[][] updatedValues = maxValues;
  if (maxValues.length < minMaxLen.length) {
    updatedValues = new byte[minMaxLen.length][];
    System.arraycopy(maxValues, 0, updatedValues, 0, maxValues.length);
    List<CarbonMeasure> measures = segmentProperties.getMeasures();
    ByteBuffer buffer = ByteBuffer.allocate(8);
    for (int i = 0; i < measures.size(); i++) {
      buffer.rewind();
      DataType dataType = measures.get(i).getDataType();
      if (dataType == DataTypes.BYTE) {
        buffer.putLong(Byte.MAX_VALUE);
        updatedValues[maxValues.length + i] = buffer.array().clone();
      } else if (dataType == DataTypes.SHORT) {
        buffer.putLong(Short.MAX_VALUE);
        updatedValues[maxValues.length + i] = buffer.array().clone();
      } else if (dataType == DataTypes.INT) {
        buffer.putLong(Integer.MAX_VALUE);
        updatedValues[maxValues.length + i] = buffer.array().clone();
      } else if (dataType == DataTypes.LONG) {
        buffer.putLong(Long.MAX_VALUE);
        updatedValues[maxValues.length + i] = buffer.array().clone();
      } else if (DataTypes.isDecimal(dataType)) {
        updatedValues[maxValues.length + i] =
            DataTypeUtil.bigDecimalToByte(BigDecimal.valueOf(Long.MAX_VALUE));
      } else {
        buffer.putDouble(Double.MAX_VALUE);
        updatedValues[maxValues.length + i] = buffer.array().clone();
      }
    }
  }
  return updatedValues;
}
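Both methods above rely on the same encoding trick: every missing integral min/max is widened to the 8-byte long layout, and a single reusable ByteBuffer is rewound and its backing array cloned per measure. Below is a minimal standalone sketch of that pattern, with no CarbonData dependencies; the class and method names are illustrative only.

import java.nio.ByteBuffer;
import java.util.Arrays;

public class MinMaxPaddingSketch {

  // widen a smaller integral value to the 8-byte long layout used for stored min/max
  static byte[] longAsBytes(long value, ByteBuffer buffer) {
    buffer.rewind();               // reuse the same 8-byte buffer for every measure
    buffer.putLong(value);
    return buffer.array().clone(); // clone, since the backing array is shared across calls
  }

  public static void main(String[] args) {
    ByteBuffer buffer = ByteBuffer.allocate(8);
    byte[] shortMin = longAsBytes(Short.MIN_VALUE, buffer);
    byte[] longMin = longAsBytes(Long.MIN_VALUE, buffer);
    System.out.println(Arrays.toString(shortMin)); // 8 bytes encoding -32768
    System.out.println(Arrays.toString(longMin));  // 8 bytes encoding Long.MIN_VALUE
  }
}

One subtlety when reading updateMinValues: Double.MIN_VALUE in Java is the smallest positive double (about 4.9e-324), not the most negative value, so the floating-point default min is not the mirror image of the Double.MAX_VALUE used in updateMaxValues.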
Use of org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure in project carbondata by apache.
The class RestructureUtil, method createMeasureInfoAndGetCurrentBlockQueryMeasures:
/**
 * Below method will be used to prepare the measure info object.
 * In this method, some of the properties extracted from the query
 * measures and the current block measures will be set.
 *
 * @param blockExecutionInfo execution info to be updated with the prepared measure info
 * @param queryMeasures measures present in query
 * @param currentBlockMeasures current block measures
 * @return measures present in the block
 */
public static List<ProjectionMeasure> createMeasureInfoAndGetCurrentBlockQueryMeasures(
    BlockExecutionInfo blockExecutionInfo, List<ProjectionMeasure> queryMeasures,
    List<CarbonMeasure> currentBlockMeasures) {
  MeasureInfo measureInfo = new MeasureInfo();
  List<ProjectionMeasure> presentMeasure = new ArrayList<>(queryMeasures.size());
  int numberOfMeasureInQuery = queryMeasures.size();
  List<Integer> measureOrdinalList = new ArrayList<>(numberOfMeasureInQuery);
  Object[] defaultValues = new Object[numberOfMeasureInQuery];
  boolean[] measureExistsInCurrentBlock = new boolean[numberOfMeasureInQuery];
  int index = 0;
  for (ProjectionMeasure queryMeasure : queryMeasures) {
    // if the query measure exists in the current block measures, mark it as present;
    // otherwise a default value is added for the measure below
    for (CarbonMeasure carbonMeasure : currentBlockMeasures) {
      if (carbonMeasure.getColumnId().equals(queryMeasure.getMeasure().getColumnId())) {
        ProjectionMeasure currentBlockMeasure = new ProjectionMeasure(carbonMeasure);
        carbonMeasure.getColumnSchema().setDataType(queryMeasure.getMeasure().getDataType());
        carbonMeasure.getColumnSchema().setPrecision(queryMeasure.getMeasure().getPrecision());
        carbonMeasure.getColumnSchema().setScale(queryMeasure.getMeasure().getScale());
        carbonMeasure.getColumnSchema().setDefaultValue(queryMeasure.getMeasure().getDefaultValue());
        currentBlockMeasure.setOrdinal(queryMeasure.getOrdinal());
        presentMeasure.add(currentBlockMeasure);
        measureOrdinalList.add(carbonMeasure.getOrdinal());
        measureExistsInCurrentBlock[index] = true;
        break;
      }
    }
    if (!measureExistsInCurrentBlock[index]) {
      defaultValues[index] = getMeasureDefaultValue(
          queryMeasure.getMeasure().getColumnSchema(), queryMeasure.getMeasure().getDefaultValue());
      blockExecutionInfo.setRestructuredBlock(true);
    }
    index++;
  }
  int[] measureOrdinals = ArrayUtils.toPrimitive(
      measureOrdinalList.toArray(new Integer[measureOrdinalList.size()]));
  measureInfo.setDefaultValues(defaultValues);
  measureInfo.setMeasureOrdinals(measureOrdinals);
  measureInfo.setMeasureExists(measureExistsInCurrentBlock);
  blockExecutionInfo.setMeasureInfo(measureInfo);
  return presentMeasure;
}
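The bookkeeping above is a generic restructure pattern: walk the query columns in order, mark each one that also exists in the block, and record a schema default for each one that does not. A minimal standalone sketch of that pattern follows; the Column class and the sample data are hypothetical stand-ins, not CarbonData types.

import java.util.Arrays;
import java.util.List;

public class RestructureSketch {

  // hypothetical stand-in for query/block column metadata
  static class Column {
    final String columnId;
    final Object defaultValue;

    Column(String columnId, Object defaultValue) {
      this.columnId = columnId;
      this.defaultValue = defaultValue;
    }
  }

  public static void main(String[] args) {
    List<Column> queryColumns = Arrays.asList(new Column("c1", null), new Column("c2", 0L));
    // this block was written before "c2" was added to the schema
    List<Column> blockColumns = Arrays.asList(new Column("c1", null));

    boolean[] existsInBlock = new boolean[queryColumns.size()];
    Object[] defaults = new Object[queryColumns.size()];
    boolean restructured = false;

    int index = 0;
    for (Column queryColumn : queryColumns) {
      for (Column blockColumn : blockColumns) {
        if (blockColumn.columnId.equals(queryColumn.columnId)) {
          existsInBlock[index] = true;
          break;
        }
      }
      if (!existsInBlock[index]) {
        // column missing in this block: answer from the schema default instead
        defaults[index] = queryColumn.defaultValue;
        restructured = true;
      }
      index++;
    }
    System.out.println(Arrays.toString(existsInBlock) + ", restructured=" + restructured);
    // prints: [true, false], restructured=true
  }
}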
Use of org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure in project carbondata by apache.
The class AbstractQueryExecutor, method getBlockExecutionInfoForBlock:
/**
 * Below method will be used to get the block execution info which is
 * required to execute any block based on the query model.
 *
 * @param queryModel query model from user query
 * @param blockIndex block index
 * @param startBlockletIndex index of the first blocklet to scan
 * @param numberOfBlockletToScan number of blocklets to scan
 * @param filePath physical path of the block's carbondata file
 * @param deleteDeltaFiles delete delta file paths applicable to the block
 * @param segmentId id of the segment the block belongs to
 * @return block execution info
 * @throws QueryExecutionException any failure during block info creation
 */
private BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel,
    AbstractIndex blockIndex, int startBlockletIndex, int numberOfBlockletToScan, String filePath,
    String[] deleteDeltaFiles, String segmentId) throws QueryExecutionException {
  BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
  SegmentProperties segmentProperties = blockIndex.getSegmentProperties();
  List<CarbonDimension> tableBlockDimensions = segmentProperties.getDimensions();
  // below is to get only those dimensions in the query which are present in the
  // table block
  List<ProjectionDimension> projectDimensions =
      RestructureUtil.createDimensionInfoAndGetCurrentBlockQueryDimension(blockExecutionInfo,
          queryModel.getProjectionDimensions(), tableBlockDimensions,
          segmentProperties.getComplexDimensions(), queryModel.getProjectionMeasures().size());
  blockExecutionInfo.setBlockId(
      CarbonUtil.getBlockId(queryModel.getAbsoluteTableIdentifier(), filePath, segmentId));
  blockExecutionInfo.setDeleteDeltaFilePath(deleteDeltaFiles);
  blockExecutionInfo.setStartBlockletIndex(startBlockletIndex);
  blockExecutionInfo.setNumberOfBlockletToScan(numberOfBlockletToScan);
  blockExecutionInfo.setProjectionDimensions(
      projectDimensions.toArray(new ProjectionDimension[projectDimensions.size()]));
  // get measures present in the current block
  List<ProjectionMeasure> currentBlockQueryMeasures =
      getCurrentBlockQueryMeasures(blockExecutionInfo, queryModel, blockIndex);
  blockExecutionInfo.setProjectionMeasures(
      currentBlockQueryMeasures.toArray(new ProjectionMeasure[currentBlockQueryMeasures.size()]));
  blockExecutionInfo.setDataBlock(blockIndex);
  // setting whether raw record query or not
  blockExecutionInfo.setRawRecordDetailQuery(queryModel.isForcedDetailRawQuery());
  // total number of dimensions to read
  blockExecutionInfo.setTotalNumberDimensionToRead(
      segmentProperties.getDimensionOrdinalToChunkMapping().size());
  blockExecutionInfo.setPrefetchBlocklet(!queryModel.isReadPageByPage());
  blockExecutionInfo.setTotalNumberOfMeasureToRead(
      segmentProperties.getMeasuresOrdinalToChunkMapping().size());
  blockExecutionInfo.setComplexDimensionInfoMap(QueryUtil.getComplexDimensionsMap(
      projectDimensions, segmentProperties.getDimensionOrdinalToChunkMapping(),
      segmentProperties.getEachComplexDimColumnValueSize(),
      queryProperties.columnToDictionaryMapping, queryProperties.complexFilterDimension));
  IndexKey startIndexKey = null;
  IndexKey endIndexKey = null;
  if (null != queryModel.getFilterExpressionResolverTree()) {
    // loading the filter executor tree for filter evaluation
    blockExecutionInfo.setFilterExecuterTree(
        FilterUtil.getFilterExecuterTree(queryModel.getFilterExpressionResolverTree(),
            segmentProperties, blockExecutionInfo.getComlexDimensionInfoMap()));
  }
  try {
    startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
    endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
  } catch (KeyGenException e) {
    throw new QueryExecutionException(e);
  }
  // setting the start index key of the block node
  blockExecutionInfo.setStartKey(startIndexKey);
  // setting the end index key of the block node
  blockExecutionInfo.setEndKey(endIndexKey);
  // expression dimensions
  List<CarbonDimension> expressionDimensions =
      new ArrayList<CarbonDimension>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
  // expression measures
  List<CarbonMeasure> expressionMeasures =
      new ArrayList<CarbonMeasure>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
  // setting all the dimension chunk indexes to be read from file
  int numberOfElementToConsider = 0;
  // list of dimensions to be projected
  Set<Integer> allProjectionListDimensionIdexes = new LinkedHashSet<>();
  // create a list of filter dimensions present in the current block
  Set<CarbonDimension> currentBlockFilterDimensions =
      getCurrentBlockFilterDimensions(queryProperties.complexFilterDimension, segmentProperties);
  int[] dimensionChunkIndexes = QueryUtil.getDimensionChunkIndexes(projectDimensions,
      segmentProperties.getDimensionOrdinalToChunkMapping(), expressionDimensions,
      currentBlockFilterDimensions, allProjectionListDimensionIdexes);
  int numberOfColumnToBeReadInOneIO = Integer.parseInt(CarbonProperties.getInstance()
      .getProperty(CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO,
          CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE));
  if (dimensionChunkIndexes.length > 0) {
    numberOfElementToConsider =
        dimensionChunkIndexes[dimensionChunkIndexes.length - 1]
            == segmentProperties.getBlockTodimensionOrdinalMapping().size() - 1
            ? dimensionChunkIndexes.length - 1 : dimensionChunkIndexes.length;
    blockExecutionInfo.setAllSelectedDimensionColumnIndexRange(CarbonUtil.getRangeIndex(
        dimensionChunkIndexes, numberOfElementToConsider, numberOfColumnToBeReadInOneIO));
  } else {
    blockExecutionInfo.setAllSelectedDimensionColumnIndexRange(new int[0][0]);
  }
  // get the list of updated filter measures present in the current block
  Set<CarbonMeasure> filterMeasures =
      getCurrentBlockFilterMeasures(queryProperties.filterMeasures, segmentProperties);
  // list of measures to be projected
  List<Integer> allProjectionListMeasureIndexes = new ArrayList<>();
  int[] measureChunkIndexes = QueryUtil.getMeasureChunkIndexes(currentBlockQueryMeasures,
      expressionMeasures, segmentProperties.getMeasuresOrdinalToChunkMapping(), filterMeasures,
      allProjectionListMeasureIndexes);
  if (measureChunkIndexes.length > 0) {
    numberOfElementToConsider =
        measureChunkIndexes[measureChunkIndexes.length - 1]
            == segmentProperties.getMeasures().size() - 1
            ? measureChunkIndexes.length - 1 : measureChunkIndexes.length;
    // setting all the measure chunk indexes to be read from file
    blockExecutionInfo.setAllSelectedMeasureIndexRange(CarbonUtil.getRangeIndex(
        measureChunkIndexes, numberOfElementToConsider, numberOfColumnToBeReadInOneIO));
  } else {
    blockExecutionInfo.setAllSelectedMeasureIndexRange(new int[0][0]);
  }
  // setting the indexes of the dimensions in the projection list
  blockExecutionInfo.setProjectionListDimensionIndexes(ArrayUtils.toPrimitive(
      allProjectionListDimensionIdexes
          .toArray(new Integer[allProjectionListDimensionIdexes.size()])));
  // setting the indexes of the measures in the projection list
  blockExecutionInfo.setProjectionListMeasureIndexes(ArrayUtils.toPrimitive(
      allProjectionListMeasureIndexes
          .toArray(new Integer[allProjectionListMeasureIndexes.size()])));
  // setting the size of fixed key column (dictionary column)
  blockExecutionInfo.setFixedLengthKeySize(getKeySize(projectDimensions, segmentProperties));
  Set<Integer> dictionaryColumnChunkIndex = new HashSet<Integer>();
  List<Integer> noDictionaryColumnChunkIndex = new ArrayList<Integer>();
  // get the block indexes to be read from file for the query dimensions,
  // for both dictionary columns and no-dictionary columns
  QueryUtil.fillQueryDimensionChunkIndexes(projectDimensions,
      segmentProperties.getDimensionOrdinalToChunkMapping(), dictionaryColumnChunkIndex,
      noDictionaryColumnChunkIndex);
  int[] queryDictionaryColumnChunkIndexes = ArrayUtils.toPrimitive(
      dictionaryColumnChunkIndex.toArray(new Integer[dictionaryColumnChunkIndex.size()]));
  // need to sort the dictionary column chunk indexes, as for all dimension
  // columns the key will be filled based on key order
  Arrays.sort(queryDictionaryColumnChunkIndexes);
  blockExecutionInfo.setDictionaryColumnChunkIndex(queryDictionaryColumnChunkIndexes);
  // setting the no-dictionary column block indexes
  blockExecutionInfo.setNoDictionaryColumnChunkIndexes(ArrayUtils.toPrimitive(
      noDictionaryColumnChunkIndex.toArray(new Integer[noDictionaryColumnChunkIndex.size()])));
  // setting each column value size
  blockExecutionInfo.setEachColumnValueSize(segmentProperties.getEachDimColumnValueSize());
  blockExecutionInfo.setComplexColumnParentBlockIndexes(
      getComplexDimensionParentBlockIndexes(projectDimensions));
  blockExecutionInfo.setVectorBatchCollector(queryModel.isVectorReader());
  try {
    // set the column group and its key structure info, which will be used
    // for getting the column group column data in case of final row
    // and in case of dimension aggregation
    blockExecutionInfo.setColumnGroupToKeyStructureInfo(
        QueryUtil.getColumnGroupKeyStructureInfo(projectDimensions, segmentProperties));
  } catch (KeyGenException e) {
    throw new QueryExecutionException(e);
  }
  // set actual query dimensions and measures; they may differ in restructure scenarios
  blockExecutionInfo.setActualQueryDimensions(queryModel.getProjectionDimensions()
      .toArray(new ProjectionDimension[queryModel.getProjectionDimensions().size()]));
  blockExecutionInfo.setActualQueryMeasures(queryModel.getProjectionMeasures()
      .toArray(new ProjectionMeasure[queryModel.getProjectionMeasures().size()]));
  DataTypeUtil.setDataTypeConverter(queryModel.getConverter());
  return blockExecutionInfo;
}
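One detail worth calling out in the chunk-index handling above: rather than issuing one read per selected column chunk, the executor groups the sorted chunk indexes into contiguous ranges capped at numberOfColumnToBeReadInOneIO via CarbonUtil.getRangeIndex, so neighboring columns can be fetched in a single IO. The sketch below is an illustrative reimplementation under the assumption that the grouping simply splits runs of consecutive indexes at the cap; the real CarbonUtil.getRangeIndex may differ in details.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class RangeIndexSketch {

  // group sorted chunk indexes into [start, end] ranges of consecutive values,
  // never spanning more than maxPerIo columns per range (assumed behavior)
  static int[][] toRanges(int[] sortedIndexes, int count, int maxPerIo) {
    List<int[]> ranges = new ArrayList<>();
    int start = 0;
    for (int i = 1; i <= count; i++) {
      boolean closeRange = i == count
          || sortedIndexes[i] != sortedIndexes[i - 1] + 1
          || i - start == maxPerIo;
      if (closeRange) {
        ranges.add(new int[] { sortedIndexes[start], sortedIndexes[i - 1] });
        start = i;
      }
    }
    return ranges.toArray(new int[ranges.size()][]);
  }

  public static void main(String[] args) {
    int[] chunkIndexes = { 0, 1, 2, 5, 6, 9 };
    System.out.println(Arrays.deepToString(
        toRanges(chunkIndexes, chunkIndexes.length, 2)));
    // [[0, 1], [2, 2], [5, 6], [9, 9]] with a cap of 2 columns per IO
  }
}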