use of org.apache.carbondata.core.scan.model.QueryModel in project carbondata by apache.
The class CarbonInputFormat, method getQueryModel.
/**
 * Builds the {@link QueryModel} used to scan the given split: creates the query
 * plan from the configured column projection, attaches the resolved filter so
 * blocklets can be pruned before scanning, and propagates any invalid-segment /
 * invalid-timestamp-range information carried by a multi-block split.
 *
 * @param inputSplit         split to be read; may be a {@code CarbonMultiBlockSplit}
 * @param taskAttemptContext task context supplying the Hadoop configuration
 * @return the fully initialised query model
 * @throws IOException if the carbon table cannot be read from the configuration
 */
public QueryModel getQueryModel(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
    throws IOException {
  Configuration configuration = taskAttemptContext.getConfiguration();
  CarbonTable carbonTable = getCarbonTable(configuration);
  // Reuse the identifier the table already holds to avoid an unnecessary
  // deserialization from the configuration.
  AbsoluteTableIdentifier identifier = carbonTable.getAbsoluteTableIdentifier();
  // The query plan includes the projection columns.
  String projection = getColumnProjection(configuration);
  CarbonQueryPlan queryPlan = CarbonInputFormatUtil.createQueryPlan(carbonTable, projection);
  QueryModel queryModel = QueryModel.createModel(identifier, queryPlan, carbonTable);
  // Set the filter on the query model so blocklets can be filtered before the scan.
  Expression filter = getFilterPredicates(configuration);
  CarbonInputFormatUtil.processFilterExpression(filter, carbonTable);
  FilterResolverIntf filterIntf = CarbonInputFormatUtil.resolveFilter(filter, identifier);
  queryModel.setFilterExpressionResolverTree(filterIntf);
  // Update the file-level index store if there are invalid segments.
  if (inputSplit instanceof CarbonMultiBlockSplit) {
    CarbonMultiBlockSplit split = (CarbonMultiBlockSplit) inputSplit;
    // Guard against an empty split list: the previous code called
    // getAllSplits().get(0) unconditionally and would throw
    // IndexOutOfBoundsException on an empty multi-block split.
    if (!split.getAllSplits().isEmpty()) {
      List<String> invalidSegments = split.getAllSplits().get(0).getInvalidSegments();
      if (!invalidSegments.isEmpty()) {
        queryModel.setInvalidSegmentIds(invalidSegments);
      }
      List<UpdateVO> invalidTimestampRangeList =
          split.getAllSplits().get(0).getInvalidTimestampRange();
      if (invalidTimestampRangeList != null && !invalidTimestampRangeList.isEmpty()) {
        queryModel.setInvalidBlockForSegmentId(invalidTimestampRangeList);
      }
    }
  }
  return queryModel;
}
use of org.apache.carbondata.core.scan.model.QueryModel in project carbondata by apache.
The class CarbonCompactionExecutor, method prepareQueryModel.
/**
 * Prepares the query model used by compaction: a forced detail raw query over
 * every dimension and measure of the fact table, with no filter applied.
 *
 * @param blockList table blocks to be scanned by the compaction query
 * @return query model configured for a detail raw scan of the fact table
 */
private QueryModel prepareQueryModel(List<TableBlockInfo> blockList) {
  QueryModel model = new QueryModel();
  model.setTableBlockInfos(blockList);
  // Compaction reads raw rows; no filtering is applied.
  model.setForcedDetailRawQuery(true);
  model.setFilterExpressionResolverTree(null);
  // Hoisted: the fact table name is invariant across both lookups below.
  String factTableName = carbonTable.getFactTableName();
  // Project every dimension of the fact table.
  // NOTE(review): an earlier comment said "check if dimension is deleted", but
  // no such check is performed here — presumably deleted columns are already
  // excluded by getDimensionByTableName; confirm.
  List<CarbonDimension> dimensions = carbonTable.getDimensionByTableName(factTableName);
  List<QueryDimension> dims = new ArrayList<>(dimensions.size());
  for (CarbonDimension dim : dimensions) {
    QueryDimension queryDimension = new QueryDimension(dim.getColName());
    queryDimension.setDimension(dim);
    dims.add(queryDimension);
  }
  model.setQueryDimension(dims);
  // Project every measure of the fact table (same caveat about deleted columns).
  List<CarbonMeasure> measures = carbonTable.getMeasureByTableName(factTableName);
  List<QueryMeasure> msrs = new ArrayList<>(measures.size());
  for (CarbonMeasure carbonMeasure : measures) {
    QueryMeasure queryMeasure = new QueryMeasure(carbonMeasure.getColName());
    queryMeasure.setMeasure(carbonMeasure);
    msrs.add(queryMeasure);
  }
  model.setQueryMeasures(msrs);
  // nanoTime is used only as a locally-unique query id, not as a timestamp.
  model.setQueryId(System.nanoTime() + "");
  model.setAbsoluteTableIdentifier(carbonTable.getAbsoluteTableIdentifier());
  model.setTable(carbonTable);
  return model;
}
use of org.apache.carbondata.core.scan.model.QueryModel in project carbondata by apache.
The class CarbondataRecordSetProvider, method getRecordSet.
/**
 * Creates a record set for the given Carbondata split: validates and converts
 * the column handles, builds a column projection in the requested order,
 * resolves the cached table metadata, constructs the query model, pushes the
 * split's constraints down as a filter, and wraps everything in a
 * {@code CarbondataRecordSet}.
 */
@Override
public RecordSet getRecordSet(ConnectorTransactionHandle transactionHandle, ConnectorSession session,
    ConnectorSplit split, List<? extends ColumnHandle> columns) {
  requireNonNull(split, "split is null");
  requireNonNull(columns, "columns is null");
  CarbondataSplit carbondataSplit =
      checkType(split, CarbondataSplit.class, "split is not class CarbondataSplit");
  checkArgument(carbondataSplit.getConnectorId().equals(connectorId),
      "split is not for this connector");
  // Convert all column handles and build the projection (column order preserved).
  // StringBuilder replaces the previous += concatenation in a loop, and the
  // typed handle returned by checkType is reused instead of a second cast.
  ImmutableList.Builder<CarbondataColumnHandle> handles = ImmutableList.builder();
  StringBuilder projection = new StringBuilder();
  for (ColumnHandle handle : columns) {
    CarbondataColumnHandle carbondataHandle =
        checkType(handle, CarbondataColumnHandle.class, "handle");
    handles.add(carbondataHandle);
    if (projection.length() > 0) {
      projection.append(',');
    }
    projection.append(carbondataHandle.getColumnName());
  }
  // A null projection means "no columns requested" downstream, matching the
  // previous behaviour for an empty column list.
  String targetCols = columns.isEmpty() ? null : projection.toString();
  CarbonTableCacheModel tableCacheModel =
      carbonTableReader.getCarbonCache(carbondataSplit.getSchemaTableName());
  checkNotNull(tableCacheModel, "tableCacheModel should not be null");
  checkNotNull(tableCacheModel.carbonTable, "tableCacheModel.carbonTable should not be null");
  checkNotNull(tableCacheModel.tableInfo, "tableCacheModel.tableInfo should not be null");
  // Build the query model from the cached table and the projection.
  CarbonTable targetTable = tableCacheModel.carbonTable;
  CarbonQueryPlan queryPlan = CarbonInputFormatUtil.createQueryPlan(targetTable, targetCols);
  QueryModel queryModel =
      QueryModel.createModel(targetTable.getAbsoluteTableIdentifier(), queryPlan, targetTable);
  // Push the split's constraints down as a filter.
  fillFilter2QueryModel(queryModel, carbondataSplit.getConstraints(), targetTable);
  return new CarbondataRecordSet(targetTable, session, carbondataSplit, handles.build(), queryModel);
}
use of org.apache.carbondata.core.scan.model.QueryModel in project carbondata by apache.
The class CarbonInputFormat, method createRecordReader.
/**
 * Creates a record reader for the given split. The reader is driven by a
 * query model built from the split and the task's configuration, using the
 * configured read-support class to materialise rows.
 */
@Override
public RecordReader<Void, T> createRecordReader(InputSplit inputSplit,
    TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
  Configuration conf = taskAttemptContext.getConfiguration();
  QueryModel model = getQueryModel(inputSplit, taskAttemptContext);
  CarbonReadSupport<T> support = getReadSupportClass(conf);
  return new CarbonRecordReader<T>(model, support);
}
use of org.apache.carbondata.core.scan.model.QueryModel in project carbondata by apache.
The class MapredCarbonInputFormat, method getQueryModel.
/**
 * Builds the query model for a Hive (mapred) read: resolves the column
 * projection — falling back to Hive's pushed-down read-column list when none
 * is configured — creates the query plan, and attaches the resolved filter
 * tree so blocklets can be pruned before scanning.
 *
 * @param configuration job configuration carrying table, projection and filter
 * @return the initialised query model
 * @throws IOException if the carbon table cannot be read from the configuration
 */
public QueryModel getQueryModel(Configuration configuration) throws IOException {
  CarbonTable table = getCarbonTable(configuration);
  // The table already carries its identifier; reuse it rather than
  // deserializing it again from the configuration.
  AbsoluteTableIdentifier tableIdentifier = table.getAbsoluteTableIdentifier();
  String projectionColumns = getColumnProjection(configuration);
  if (projectionColumns == null) {
    // Fall back to the column list Hive pushes down for this read.
    projectionColumns = configuration.get("hive.io.file.readcolumn.names");
  }
  CarbonQueryPlan plan = CarbonInputFormatUtil.createQueryPlan(table, projectionColumns);
  QueryModel model = QueryModel.createModel(tableIdentifier, plan, table);
  // Attach the resolved filter so blocklets can be filtered before the scan.
  Expression filterExpression = getFilterPredicates(configuration);
  CarbonInputFormatUtil.processFilterExpression(filterExpression, table);
  model.setFilterExpressionResolverTree(
      CarbonInputFormatUtil.resolveFilter(filterExpression, tableIdentifier));
  return model;
}
Aggregations