Usage of com.pingcap.tikv.expression.Expression in the tispark project (pingcap): class ProtoConverter, method scalarToPartialProto.
// Build a protobuf Expr.Builder with everything except the scalar signature
// filled in; the caller is responsible for setting the ScalarFuncSig.
private Expr.Builder scalarToPartialProto(Expression node, Object context) {
    Expr.Builder partial = Expr.newBuilder();
    // Mark this expression as a scalar function invocation.
    partial.setTp(ExprType.ScalarFunc);
    // Encode the return type of the whole expression.
    partial.setFieldType(toPBFieldType(getType(node)));
    // Recursively convert each operand and attach it as a child expression.
    node.getChildren().forEach(operand -> partial.addChildren(operand.accept(this, context)));
    return partial;
}
Usage of com.pingcap.tikv.expression.Expression in the tispark project (pingcap): class ProtoConverter, method visit.
/**
 * Converts a binary comparison (=, >=, >, <=, <, !=) into its protobuf Expr form
 * for push-down to TiKV.
 *
 * @param node the comparison expression to convert
 * @param context opaque context threaded through the visitor
 * @return the encoded protobuf expression
 * @throws UnsupportedOperationException if the predicate cannot be normalized or
 * its constant value overflowed, in which case it must not be pushed down
 * @throws TiExpressionException on an unrecognized comparison type
 */
@Override
protected Expr visit(ComparisonBinaryExpression node, Object context) {
    // normalize() rewrites the node into <column> <op> <constant> form; it yields
    // null when no such rewrite exists. The original code dereferenced the result
    // unconditionally, turning that case into a bare NullPointerException.
    NormalizedPredicate predicate = node.normalize();
    if (predicate == null) {
        throw new UnsupportedOperationException(
            "ComparisonBinaryExpression cannot be normalized and pushed down: " + node);
    }
    if (predicate.getValue().isOverflowed()) {
        throw new UnsupportedOperationException("overflowed ComparisonBinaryExpression cannot be pushed down");
    }
    // Map the comparison operator to the two-letter prefix of the ScalarFuncSig
    // enum constant; the suffix is derived from the left child's type.
    final String opPrefix;
    switch (node.getComparisonType()) {
        case EQUAL:
            opPrefix = "EQ";
            break;
        case GREATER_EQUAL:
            opPrefix = "GE";
            break;
        case GREATER_THAN:
            opPrefix = "GT";
            break;
        case LESS_EQUAL:
            opPrefix = "LE";
            break;
        case LESS_THAN:
            opPrefix = "LT";
            break;
        case NOT_EQUAL:
            opPrefix = "NE";
            break;
        default:
            throw new TiExpressionException(String.format("Unknown comparison type %s", node.getComparisonType()));
    }
    Expression child = node.getLeft();
    ScalarFuncSig protoSig = ScalarFuncSig.valueOf(opPrefix + getTypeSignature(child));
    Expr.Builder builder = scalarToPartialProto(node, context);
    builder.setSig(protoSig);
    builder.setFieldType(toPBFieldType(getType(node)));
    return builder.build();
}
Usage of com.pingcap.tikv.expression.Expression in the tispark project (pingcap): class TiDAGRequest, method buildScan.
/**
 * Unify indexScan and tableScan building logic since they are very much alike. DAGRequest for
 * IndexScan should also contain filters and aggregation, so we can reuse this part of logic.
 *
 * <p>DAGRequest is made up of a chain of executors with strict orders: TableScan/IndexScan >
 * Selection > Aggregation > TopN/Limit. A DAGRequest must contain one and only one TableScan or
 * IndexScan.
 *
 * @param buildIndexScan whether the dagRequest to build should be an {@link
 * com.pingcap.tidb.tipb.IndexScan}
 * @param outputOffsets output parameter: populated with the offsets (within the scan's
 * column list) of the columns the caller expects back in the result rows
 * @return final DAGRequest built
 */
private DAGRequest.Builder buildScan(boolean buildIndexScan, List<Integer> outputOffsets) {
long id = getPhysicalId();
checkNotNull(startTs, "startTs is null");
checkArgument(startTs.getVersion() != 0, "timestamp is 0");
// Drop any push-down state left over from a previous build attempt.
clearPushDownInfo();
DAGRequest.Builder dagRequestBuilder = DAGRequest.newBuilder();
Executor.Builder executorBuilder = Executor.newBuilder();
IndexScan.Builder indexScanBuilder = IndexScan.newBuilder();
TableScan.Builder tblScanBuilder = TableScan.newBuilder();
// find a column's offset in fields
Map<String, Integer> colOffsetInFieldMap = new HashMap<>();
// find a column's position in index
Map<String, Integer> colPosInIndexMap = new HashMap<>();
if (buildIndexScan) {
// IndexScan
if (indexInfo == null) {
throw new TiClientInternalException("Index is empty for index scan");
}
List<TiColumnInfo> columnInfoList = tableInfo.getColumns();
boolean hasPk = false;
// We extract index column info
List<Integer> indexColOffsets = indexInfo.getIndexColumns().stream().map(TiIndexColumn::getOffset).collect(Collectors.toList());
int idxPos = 0;
// for index scan builder, columns are added by its order in index
for (Integer idx : indexColOffsets) {
TiColumnInfo tiColumnInfo = columnInfoList.get(idx);
ColumnInfo columnInfo = tiColumnInfo.toProto(tableInfo);
colPosInIndexMap.put(tiColumnInfo.getName(), idxPos++);
ColumnInfo.Builder colBuilder = ColumnInfo.newBuilder(columnInfo);
// columnId == -1 appears to mark the implicit row-handle / PK column here
// (NOTE(review): convention inferred from this method only — confirm in TiColumnInfo).
if (columnInfo.getColumnId() == -1) {
hasPk = true;
colBuilder.setPkHandle(true);
}
indexScanBuilder.addColumns(colBuilder);
}
int colCount = indexScanBuilder.getColumnsCount();
if (isDoubleRead()) {
// TODO: we may merge indexDoubleRead and coveringIndexRead logic
// Double read: record, for each requested field that is part of the index,
// its position within the index so filters can reference it.
for (ColumnRef col : getFields()) {
Integer pos = colPosInIndexMap.get(col.getName());
if (pos != null) {
TiColumnInfo columnInfo = columnInfoList.get(indexColOffsets.get(pos));
if (col.matchName(columnInfo.getName())) {
colOffsetInFieldMap.put(col.getName(), pos);
}
// TODO: primary key may also be considered if pkIsHandle
}
}
// double read case
if (!hasPk) {
// add handle column
// The second-stage table lookup needs row handles, so append the handle
// column(s) when the index itself does not carry the PK.
if (!tableInfo.isCommonHandle()) {
indexScanBuilder.addColumns(handleColumn);
++colCount;
} else {
// Clustered (common-handle) table: the handle is the full primary key.
for (TiIndexColumn col : tableInfo.getPrimaryKey().getIndexColumns()) {
indexScanBuilder.addColumns(tableInfo.getColumn(col.getName()).toProto(tableInfo));
++colCount;
}
}
addRequiredIndexDataType();
}
if (colCount == 0) {
throw new DAGRequestException("Incorrect index scan with zero column count");
}
// Output only the trailing handle column(s): for int handles the last column,
// for common handles the last idxColSize columns, in order.
if (!tableInfo.isCommonHandle()) {
outputOffsets.add(colCount - 1);
} else {
int idxColSize = tableInfo.getPrimaryKey().getIndexColumns().size();
for (int i = idxColSize; i >= 1; i--) {
outputOffsets.add(colCount - i);
}
}
} else {
// Covering index scan: every requested field must be resolvable from the
// index itself (or be the primary key).
boolean pkIsNeeded = false;
// offset for dagRequest should be in accordance with fields
for (ColumnRef col : getFields()) {
Integer pos = colPosInIndexMap.get(col.getName());
if (pos != null) {
TiColumnInfo columnInfo = columnInfoList.get(indexColOffsets.get(pos));
if (col.matchName(columnInfo.getName())) {
outputOffsets.add(pos);
colOffsetInFieldMap.put(col.getName(), pos);
}
} else // logically it must be the pk column. Extra check here.
if (tableInfo.getColumn(col.getName()).isPrimaryKey()) {
pkIsNeeded = true;
// offset should be processed for each primary key encountered
outputOffsets.add(colCount);
// for index scan, column offset must be in the order of index->handle
colOffsetInFieldMap.put(col.getName(), indexColOffsets.size());
} else {
throw new DAGRequestException("columns other than primary key and index key exist in fields while index single read: " + col.getName());
}
}
// pk is not included in index but still needed
if (pkIsNeeded) {
if (!tableInfo.isCommonHandle()) {
indexScanBuilder.addColumns(handleColumn);
}
}
}
executorBuilder.setTp(ExecType.TypeIndexScan);
indexScanBuilder.setTableId(id).setIndexId(indexInfo.getId());
if (tableInfo.isCommonHandle()) {
// Clustered index: tell TiKV which column ids make up the primary key.
for (TiIndexColumn col : tableInfo.getPrimaryKey().getIndexColumns()) {
indexScanBuilder.addPrimaryColumnIds(tableInfo.getColumn(col.getName()).getId());
}
}
dagRequestBuilder.addExecutors(executorBuilder.setIdxScan(indexScanBuilder).build());
} else {
// TableScan
executorBuilder.setTp(ExecType.TypeTableScan);
tblScanBuilder.setTableId(id);
if (tableInfo.isCommonHandle()) {
for (TiIndexColumn col : tableInfo.getPrimaryKey().getIndexColumns()) {
tblScanBuilder.addPrimaryColumnIds(tableInfo.getColumn(col.getName()).getId());
}
}
// Step1. Add columns to first executor
int lastOffset = 0;
for (ColumnRef col : getFields()) {
// can't allow duplicated col added into executor.
if (!colOffsetInFieldMap.containsKey(col.getName())) {
tblScanBuilder.addColumns(tableInfo.getColumn(col.getName()).toProto(tableInfo));
colOffsetInFieldMap.put(col.getName(), lastOffset);
lastOffset++;
}
// column offset should be in accordance with fields
outputOffsets.add(colOffsetInFieldMap.get(col.getName()));
}
dagRequestBuilder.addExecutors(executorBuilder.setTblScan(tblScanBuilder));
}
// For an index double read, only predicates fully covered by the index may be
// pushed into this (first-stage) request; anything else must stay client-side.
boolean isIndexDoubleScan = buildIndexScan && isDoubleRead();
// Should build these executors when performing CoveringIndexScan/TableScan
// clear executorBuilder
executorBuilder.clear();
// Step2. Add others
// DO NOT EDIT EXPRESSION CONSTRUCTION ORDER
// Or make sure the construction order is below:
// TableScan/IndexScan > Selection > Aggregation > TopN/Limit
Expression whereExpr = mergeCNFExpressions(getFilters());
if (whereExpr != null) {
if (!isIndexDoubleScan || isExpressionCoveredByIndex(whereExpr)) {
executorBuilder.setTp(ExecType.TypeSelection);
dagRequestBuilder.addExecutors(executorBuilder.setSelection(Selection.newBuilder().addConditions(ProtoConverter.toProto(whereExpr, colOffsetInFieldMap))));
executorBuilder.clear();
addPushDownFilters();
} else {
// Filter not coverable by the index: stop here; later executors would be
// evaluated on incomplete rows.
return dagRequestBuilder;
}
}
if (!getGroupByItems().isEmpty() || !getAggregates().isEmpty()) {
// only allow table scan or covering index scan push down groupby and agg
if (!isIndexDoubleScan || (isGroupByCoveredByIndex() && isAggregateCoveredByIndex())) {
pushDownAggAndGroupBy(dagRequestBuilder, executorBuilder, outputOffsets, colOffsetInFieldMap);
} else {
return dagRequestBuilder;
}
}
// ORDER BY with a limit becomes a TopN executor inside pushDownOrderBy;
// a bare limit (no ordering) is pushed as a Limit executor instead.
if (!getOrderByItems().isEmpty()) {
if (!isIndexDoubleScan || isOrderByCoveredByIndex()) {
// only allow table scan or covering index scan push down orderby
pushDownOrderBy(dagRequestBuilder, executorBuilder, colOffsetInFieldMap);
}
} else if (getLimit() != 0) {
if (!isIndexDoubleScan) {
pushDownLimit(dagRequestBuilder, executorBuilder);
}
}
return dagRequestBuilder;
}
Usage of com.pingcap.tikv.expression.Expression in the tispark project (pingcap): class PredicateUtils, method expressionToPoints.
/**
 * Turn access conditions into a list of point keys. Each condition is bound to a single
 * index key; we pick up a single condition for each index key and disregard multiple EQ
 * conditions in DNF.
 *
 * @param pointPredicates expressions that are convertible to access points; must not be null
 * @param table the table the index belongss to
 * @param index the index whose columns the predicates are evaluated against
 * @return access points for each index
 * @throws TiExpressionException if any predicate fails to convert to point ranges
 */
private static List<Key> expressionToPoints(List<Expression> pointPredicates, TiTableInfo table, TiIndexInfo index) {
requireNonNull(pointPredicates, "pointPredicates cannot be null");
List<Key> resultKeys = new ArrayList<>();
IndexRangeSetBuilder indexRangeBuilder = new IndexRangeSetBuilder(table, index);
// Enhanced-for replaces the original index loop (the index was only used for get(i)).
// Each predicate expands to one or more point keys; joinKeys combines them with the
// keys accumulated so far so every result key covers the full index prefix.
for (Expression predicate : pointPredicates) {
try {
// each expr will be expanded to one or more points
Set<Range<TypedKey>> ranges = indexRangeBuilder.buildRange(predicate).asRanges();
List<Key> points = rangesToPoint(ranges);
resultKeys = joinKeys(resultKeys, points);
} catch (Exception e) {
throw new TiExpressionException(String.format("Error converting access points %s", predicate), e);
}
}
return resultKeys;
}
Usage of com.pingcap.tikv.expression.Expression in the tispark project (pingcap): class AstBuilder, method visitMathExpressionAtom.
// Translates a parsed MySQL arithmetic atom (left op right) into the internal
// ArithmeticBinaryExpression form; unsupported operators are rejected.
@Override
public Expression visitMathExpressionAtom(MySqlParser.MathExpressionAtomContext ctx) {
    // Convert both operand subtrees first, then dispatch on the operator's text.
    Expression lhs = visitChildren(ctx.left);
    Expression rhs = visitChildren(ctx.right);
    String operator = ctx.mathOperator().getText();
    if ("+".equals(operator)) {
        return ArithmeticBinaryExpression.plus(lhs, rhs);
    }
    if ("-".equals(operator)) {
        return ArithmeticBinaryExpression.minus(lhs, rhs);
    }
    if ("*".equals(operator)) {
        return ArithmeticBinaryExpression.multiply(lhs, rhs);
    }
    // "/" and "div" are mapped to the same division expression, matching the
    // original switch's fall-through cases (comparison is case-sensitive).
    if ("/".equals(operator) || "div".equals(operator)) {
        return ArithmeticBinaryExpression.divide(lhs, rhs);
    }
    throw new UnsupportedSyntaxException(ctx.toString() + ": it is not supported right now");
}
Aggregations