Use of org.apache.carbondata.core.scan.model.QueryModelBuilder in project carbondata by apache: class CarbonInputFormat, method createQueryModel.
/**
 * Builds the {@link QueryModel} used to read the given input split.
 *
 * @param inputSplit         split whose blocklets may contribute implicit filter values
 * @param taskAttemptContext task context carrying the job configuration
 * @param indexFilter        filter to push down; may be {@code null}
 * @return the configured query model
 * @throws IOException if the carbon table cannot be resolved from the configuration
 */
public QueryModel createQueryModel(InputSplit inputSplit, TaskAttemptContext taskAttemptContext, IndexFilter indexFilter) throws IOException {
  Configuration conf = taskAttemptContext.getConfiguration();
  CarbonTable table = getOrCreateCarbonTable(conf);
  // Resolve the projection; an absent projection string means "project no columns".
  String projection = getColumnProjection(conf);
  String[] projectColumns = (projection == null) ? new String[0] : projection.split(",");
  if (indexFilter != null) {
    // Column drift is only relevant for transactional tables.
    checkAndAddImplicitExpression(indexFilter, inputSplit,
        table.isTransactionalTable() && table.hasColumnDrift());
  }
  QueryModel queryModel = new QueryModelBuilder(table)
      .projectColumns(projectColumns)
      .filterExpression(indexFilter)
      .dataConverter(getDataTypeConverter(conf))
      .build();
  // Honor the read-only-delta flag when present in the job configuration.
  if (Boolean.parseBoolean(conf.get(READ_ONLY_DELTA))) {
    queryModel.setReadOnlyDelta(true);
  }
  return queryModel;
}
Use of org.apache.carbondata.core.scan.model.QueryModelBuilder in project carbondata by apache: class CarbonCompactionExecutor, method processTableBlocks.
/**
 * For processing of the table blocks.
 *
 * @return Map of String with Carbon iterators
 * Map has 2 elements: UNSORTED and SORTED
 * Map(UNSORTED) = List of Iterators which yield unsorted data
 * Map(SORTED) = List of Iterators which yield sorted data
 * In Range Column compaction we will have a Filter Expression to process
 */
public Map<String, List<RawResultIterator>> processTableBlocks(Configuration configuration, Expression filterExpr) throws IOException {
Map<String, List<RawResultIterator>> resultList = new HashMap<>(2);
resultList.put(CarbonCompactionUtil.UNSORTED_IDX, new ArrayList<RawResultIterator>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE));
resultList.put(CarbonCompactionUtil.SORTED_IDX, new ArrayList<RawResultIterator>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE));
List<TableBlockInfo> tableBlockInfos = null;
QueryModelBuilder builder = null;
// Range-column compaction passes a filter; plain compaction reads all rows unfiltered.
if (null == filterExpr) {
builder = new QueryModelBuilder(carbonTable).projectAllColumns().dataConverter(dataTypeConverter).enableForcedDetailRawQuery();
} else {
// Range filters are disabled so each range boundary is evaluated as given.
builder = new QueryModelBuilder(carbonTable).projectAllColumns().filterExpression(new IndexFilter(carbonTable, filterExpr)).dataConverter(dataTypeConverter).enableForcedDetailRawQuery().convertToRangeFilter(false);
}
if (enablePageLevelReaderForCompaction()) {
builder.enableReadPageByPage();
}
// NOTE: queryModel is an instance field; it is reused (and mutated via
// setTableBlockInfos below) for every block list in the loops that follow.
queryModel = builder.build();
// iterate each seg ID
for (Map.Entry<String, TaskBlockInfo> taskMap : segmentMapping.entrySet()) {
String segmentId = taskMap.getKey();
List<DataFileFooter> listMetadata = dataFileMetadataSegMapping.get(segmentId);
// for each segment get taskblock info
TaskBlockInfo taskBlockInfo = taskMap.getValue();
Set<String> taskBlockListMapping = taskBlockInfo.getTaskSet();
// Check if block needs sorting or not: re-sort is required when the first
// footer's sort order no longer matches the table's current sort columns.
boolean sortingRequired = !CarbonCompactionUtil.isSortedByCurrentSortColumns(carbonTable, listMetadata.get(0));
for (String task : taskBlockListMapping) {
tableBlockInfos = taskBlockInfo.getTableBlockInfoList(task);
// during update there may be a chance that the cardinality may change within the segment
// which may lead to failure while converting the row, so get all the blocks present in a
// task and then split into multiple lists of same column values and create separate
// RawResultIterator for each tableBlockInfo of same column values. If all the blocks have
// same column values, then make a single RawResultIterator for all the blocks
List<List<TableBlockInfo>> listOfTableBlocksBasedOnKeyLength = getListOfTableBlocksBasedOnColumnValueSize(tableBlockInfos);
for (List<TableBlockInfo> tableBlockInfoList : listOfTableBlocksBasedOnKeyLength) {
Collections.sort(tableBlockInfoList);
// NOTE(review): this logs tableBlockInfos.size() (all blocks of the task),
// not tableBlockInfoList.size() (the current sub-list) — confirm intended.
LOGGER.info("for task -" + task + "- in segment id -" + segmentId + "- block size is -" + tableBlockInfos.size());
queryModel.setTableBlockInfos(tableBlockInfoList);
// Route the iterator to the bucket matching whether its data still needs sorting.
if (sortingRequired) {
resultList.get(CarbonCompactionUtil.UNSORTED_IDX).add(getRawResultIterator(configuration, segmentId, task, tableBlockInfoList));
} else {
resultList.get(CarbonCompactionUtil.SORTED_IDX).add(getRawResultIterator(configuration, segmentId, task, tableBlockInfoList));
}
}
}
}
return resultList;
}
Use of org.apache.carbondata.core.scan.model.QueryModelBuilder in project carbondata by apache: class CarbonSecondaryIndexExecutor, method prepareQueryModel.
/**
* Preparing the query model.
*/
/**
 * Prepares the query model for secondary index creation.
 *
 * The projection consists of the secondary index columns followed by the
 * table's implicit dimensions (position id / row id columns), which are
 * required to map index rows back to their source rows.
 *
 * @return a forced detail-raw query model with a nano-time based query id
 */
public QueryModel prepareQueryModel() {
  // Add implicit column position id or row id in case of secondary index creation
  List<CarbonDimension> implicitDimensions = carbonTable.getImplicitDimensions();
  String[] projection = new String[implicitDimensions.size() + secondaryIndexColumns.length];
  int idx = 0;
  // Secondary index columns come first, then the implicit dimension names.
  for (String indexColumn : secondaryIndexColumns) {
    projection[idx++] = indexColumn;
  }
  for (CarbonDimension dimension : implicitDimensions) {
    projection[idx++] = dimension.getColName();
  }
  QueryModel model = new QueryModelBuilder(carbonTable)
      .projectColumns(projection)
      .dataConverter(dataTypeConverter)
      .enableForcedDetailRawQuery()
      .build();
  // Nano-time id keeps concurrently created query models distinguishable.
  model.setQueryId(System.nanoTime() + "");
  return model;
}
Use of org.apache.carbondata.core.scan.model.QueryModelBuilder in project carbondata by apache: class MapredCarbonInputFormat, method getQueryModel.
/**
 * Builds the {@link QueryModel} for the Hive mapred input format.
 *
 * @param configuration job configuration carrying projection and filter information
 * @param path          table path used to resolve the carbon table
 * @return the configured query model
 * @throws IOException                   if the carbon table cannot be read
 * @throws InvalidConfigurationException if the configuration is invalid
 * @throws SQLException                  if filter predicate resolution fails
 */
private QueryModel getQueryModel(Configuration configuration, String path) throws IOException, InvalidConfigurationException, SQLException {
  CarbonTable carbonTable = getCarbonTable(configuration, path);
  String projectionString = getProjection(configuration, carbonTable);
  // Guard against a missing projection, consistent with
  // CarbonInputFormat.createQueryModel: a null projection string means
  // "project no columns" rather than throwing an NPE on split().
  String[] projectionColumns =
      (projectionString == null) ? new String[0] : projectionString.split(",");
  return new QueryModelBuilder(carbonTable)
      .projectColumns(projectionColumns)
      .filterExpression(getFilterPredicates(configuration))
      .dataConverter(new DataTypeConverterImpl())
      .build();
}
Aggregations