Use of org.apache.drill.common.expression.ErrorCollectorImpl in project drill by axbaretto.
In class StreamingAggBatch, method createAggregatorInternal:
private StreamingAggregator createAggregatorInternal() throws SchemaChangeException, ClassTransformationException, IOException {
  ClassGenerator<StreamingAggregator> cg = CodeGenerator.getRoot(StreamingAggTemplate.TEMPLATE_DEFINITION, context.getOptions());
  cg.getCodeGenerator().plainJavaCapable(true);
  container.clear();

  LogicalExpression[] keyExprs = new LogicalExpression[popConfig.getKeys().size()];
  LogicalExpression[] valueExprs = new LogicalExpression[popConfig.getExprs().size()];
  TypedFieldId[] keyOutputIds = new TypedFieldId[popConfig.getKeys().size()];
  ErrorCollector collector = new ErrorCollectorImpl();

  // Materialize the grouping key expressions and create an output vector for each key.
  for (int i = 0; i < keyExprs.length; i++) {
    final NamedExpression ne = popConfig.getKeys().get(i);
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
    if (expr == null) {
      continue;
    }
    keyExprs[i] = expr;
    final MaterializedField outputField = MaterializedField.create(ne.getRef().getLastSegment().getNameSegment().getPath(), expr.getMajorType());
    @SuppressWarnings("resource")
    final ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
    keyOutputIds[i] = container.add(vector);
  }

  // Materialize the aggregate value expressions and wrap each one in a vector write expression.
  for (int i = 0; i < valueExprs.length; i++) {
    final NamedExpression ne = popConfig.getExprs().get(i);
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
    if (expr instanceof IfExpression) {
      throw UserException.unsupportedError(new UnsupportedOperationException("Union type not supported in aggregate functions")).build(logger);
    }
    if (expr == null) {
      continue;
    }
    final MaterializedField outputField = MaterializedField.create(ne.getRef().getLastSegment().getNameSegment().getPath(), expr.getMajorType());
    @SuppressWarnings("resource")
    ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
    TypedFieldId id = container.add(vector);
    valueExprs[i] = new ValueVectorWriteExpression(id, expr, true);
  }

  // Fail once with all accumulated materialization errors.
  if (collector.hasErrors()) {
    throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
  }

  setupIsSame(cg, keyExprs);
  setupIsSameApart(cg, keyExprs);
  addRecordValues(cg, valueExprs);
  outputRecordKeys(cg, keyOutputIds, keyExprs);
  outputRecordKeysPrev(cg, keyOutputIds, keyExprs);
  cg.getBlock("resetValues")._return(JExpr.TRUE);
  getIndex(cg);

  container.buildSchema(SelectionVectorMode.NONE);
  StreamingAggregator agg = context.getImplementationClass(cg);
  agg.setup(oContext, incoming, this);
  return agg;
}
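
Stripped of the aggregate-specific setup, the ErrorCollectorImpl usage above follows one pattern: a single collector is shared across every key and value expression, and hasErrors is checked only once after everything has been materialized. The fragment below is a minimal sketch of that pattern in isolation, not Drill code: materializeAllOrThrow is a hypothetical helper, and it assumes it sits next to the operator code above so that ExpressionTreeMaterializer, VectorAccessible, FunctionLookupContext and SchemaChangeException resolve to the same classes used there.

// Hypothetical helper distilling the error handling in createAggregatorInternal:
// materialize every expression with one shared collector, then fail once with the
// combined error text.
static LogicalExpression[] materializeAllOrThrow(List<LogicalExpression> exprs,
    VectorAccessible batch, FunctionLookupContext registry) throws SchemaChangeException {
  ErrorCollector collector = new ErrorCollectorImpl();
  LogicalExpression[] materialized = new LogicalExpression[exprs.size()];
  for (int i = 0; i < exprs.size(); i++) {
    // Errors accumulate in the collector instead of aborting on the first bad expression.
    materialized[i] = ExpressionTreeMaterializer.materialize(exprs.get(i), batch, collector, registry);
  }
  if (collector.hasErrors()) {
    // Same message shape as the operator code above.
    throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
  }
  return materialized;
}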
Use of org.apache.drill.common.expression.ErrorCollectorImpl in project drill by axbaretto.
In class TopNBatch, method createNewPriorityQueue:
public static PriorityQueue createNewPriorityQueue(MappingSet mainMapping, MappingSet leftMapping, MappingSet rightMapping,
    OptionSet optionSet, FunctionLookupContext functionLookupContext, CodeCompiler codeCompiler, List<Ordering> orderings,
    VectorAccessible batch, boolean unionTypeEnabled, boolean codegenDump, int limit, BufferAllocator allocator,
    SelectionVectorMode mode) throws ClassTransformationException, IOException, SchemaChangeException {
  CodeGenerator<PriorityQueue> cg = CodeGenerator.get(PriorityQueue.TEMPLATE_DEFINITION, optionSet);
  cg.plainJavaCapable(true);
  // Save the generated code for debugging when codegenDump is enabled.
  cg.saveCodeForDebugging(codegenDump);
  ClassGenerator<PriorityQueue> g = cg.getRoot();
  g.setMappingSet(mainMapping);

  for (Ordering od : orderings) {
    // first, we rewrite the evaluation stack for each side of the comparison.
    ErrorCollector collector = new ErrorCollectorImpl();
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, functionLookupContext, unionTypeEnabled);
    if (collector.hasErrors()) {
      throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
    }
    g.setMappingSet(leftMapping);
    HoldingContainer left = g.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
    g.setMappingSet(rightMapping);
    HoldingContainer right = g.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
    g.setMappingSet(mainMapping);

    // next we wrap the two comparison sides and add the expression block for the comparison.
    LogicalExpression fh = FunctionGenerationHelper.getOrderingComparator(od.nullsSortHigh(), left, right, functionLookupContext);
    HoldingContainer out = g.addExpr(fh, ClassGenerator.BlkCreateMode.FALSE);
    JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
    if (od.getDirection() == Direction.ASCENDING) {
      jc._then()._return(out.getValue());
    } else {
      jc._then()._return(out.getValue().minus());
    }
    g.rotateBlock();
  }

  g.rotateBlock();
  g.getEvalBlock()._return(JExpr.lit(0));
  PriorityQueue q = codeCompiler.createInstance(cg);
  q.init(limit, allocator, mode == BatchSchema.SelectionVectorMode.TWO_BYTE);
  return q;
}
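
The JCodeModel calls above (addExpr, getEvalBlock()._if, rotateBlock) assemble the body of the priority queue's comparison method. As a rough plain-Java picture of what that generated logic does, the sketch below uses a hypothetical OrderingComparator stand-in for the per-ordering value-vector comparisons Drill actually emits; it is illustrative only, not generated Drill code.

import java.util.Comparator;
import java.util.List;

// Hypothetical stand-in for one materialized ordering expression compared at two record indexes.
interface OrderingComparator {
  int compare(int leftIndex, int rightIndex);
  boolean descending();
}

// Plain-Java shape of the generated comparison: try each ordering in turn, return the
// first non-zero result (negated for DESCENDING), and fall through to 0 when all match.
final class GeneratedLikeComparator implements Comparator<Integer> {
  private final List<OrderingComparator> orderings;

  GeneratedLikeComparator(List<OrderingComparator> orderings) {
    this.orderings = orderings;
  }

  @Override
  public int compare(Integer left, Integer right) {
    for (OrderingComparator od : orderings) {
      int cmp = od.compare(left, right);
      if (cmp != 0) {
        // Mirrors the jc._then()._return(out.getValue()) / .minus() branches above.
        return od.descending() ? -cmp : cmp;
      }
    }
    // Mirrors the final g.getEvalBlock()._return(JExpr.lit(0)).
    return 0;
  }
}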
Use of org.apache.drill.common.expression.ErrorCollectorImpl in project drill by axbaretto.
In class FilterRecordBatch, method generateSV2Filterer:
protected Filterer generateSV2Filterer() throws SchemaChangeException {
  final ErrorCollector collector = new ErrorCollectorImpl();
  final List<TransferPair> transfers = Lists.newArrayList();
  final ClassGenerator<Filterer> cg = CodeGenerator.getRoot(Filterer.TEMPLATE_DEFINITION2, context.getOptions());

  final LogicalExpression expr = ExpressionTreeMaterializer.materialize(popConfig.getExpr(), incoming, collector, context.getFunctionRegistry(), false, unionTypeEnabled);
  if (collector.hasErrors()) {
    throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
  }
  cg.addExpr(new ReturnValueExpression(expr), ClassGenerator.BlkCreateMode.FALSE);

  for (final VectorWrapper<?> v : incoming) {
    final TransferPair pair = v.getValueVector().makeTransferPair(container.addOrGet(v.getField(), callBack));
    transfers.add(pair);
  }

  try {
    final TransferPair[] tx = transfers.toArray(new TransferPair[transfers.size()]);
    CodeGenerator<Filterer> codeGen = cg.getCodeGenerator();
    codeGen.plainJavaCapable(true);
    final Filterer filter = context.getImplementationClass(codeGen);
    filter.setup(context, incoming, this, tx);
    return filter;
  } catch (ClassTransformationException | IOException e) {
    throw new SchemaChangeException("Failure while attempting to load generated class", e);
  }
}
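
For context on what the compiled Filterer produces: the generated class evaluates the materialized predicate per record and fills a two-byte selection vector with the indexes that pass, while the transfer pairs simply move the underlying value vectors to the output container. The sketch below is a standalone plain-Java approximation of that selection step, using an IntPredicate as a hypothetical stand-in for the generated doEval code; it is not Drill's generated class.

import java.util.Arrays;
import java.util.function.IntPredicate;

// Standalone approximation of what the generated SV2 filter does per batch.
final class Sv2FilterSketch {

  // Returns the record indexes that satisfy the predicate, i.e. the contents the
  // generated Filterer would write into the outgoing selection vector.
  static int[] filter(int recordCount, IntPredicate predicate) {
    int[] selected = new int[recordCount];
    int out = 0;
    for (int i = 0; i < recordCount; i++) {
      if (predicate.test(i)) {
        selected[out++] = i;  // record i survives the filter
      }
    }
    return Arrays.copyOf(selected, out);
  }

  public static void main(String[] args) {
    // Keep only even-numbered records from a 6-record batch: [0, 2, 4].
    System.out.println(Arrays.toString(filter(6, i -> i % 2 == 0)));
  }
}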
Use of org.apache.drill.common.expression.ErrorCollectorImpl in project drill by axbaretto.
In class PartitionSenderRootExec, method createClassInstances:
private List<Partitioner> createClassInstances(int actualPartitions) throws SchemaChangeException {
  // set up partitioning function
  final LogicalExpression expr = operator.getExpr();
  final ErrorCollector collector = new ErrorCollectorImpl();
  final ClassGenerator<Partitioner> cg;

  cg = CodeGenerator.getRoot(Partitioner.TEMPLATE_DEFINITION, context.getOptions());
  cg.getCodeGenerator().plainJavaCapable(true);
  // Uncomment this line to save the generated code for debugging.
  // cg.getCodeGenerator().saveCodeForDebugging(true);
  ClassGenerator<Partitioner> cgInner = cg.getInnerGenerator("OutgoingRecordBatch");

  final LogicalExpression materializedExpr = ExpressionTreeMaterializer.materialize(expr, incoming, collector, context.getFunctionRegistry());
  if (collector.hasErrors()) {
    throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
  }

  // generate code to copy from an incoming value vector to the destination partition's outgoing value vector
  JExpression bucket = JExpr.direct("bucket");

  // generate evaluate expression to determine the hash
  ClassGenerator.HoldingContainer exprHolder = cg.addExpr(materializedExpr);
  cg.getEvalBlock().decl(JType.parse(cg.getModel(), "int"), "bucket", exprHolder.getValue().mod(JExpr.lit(outGoingBatchCount)));
  cg.getEvalBlock()._return(cg.getModel().ref(Math.class).staticInvoke("abs").arg(bucket));
  CopyUtil.generateCopies(cgInner, incoming, incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE);

  try {
    // compile and setup generated code
    List<Partitioner> subPartitioners = context.getImplementationClass(cg, actualPartitions);
    return subPartitioners;
  } catch (ClassTransformationException | IOException e) {
    throw new SchemaChangeException("Failure while attempting to load generated class", e);
  }
}
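
The eval block built above boils down to a single arithmetic expression: the hash of the partitioning expression modulo the number of outgoing batches, with Math.abs applied because the modulo of a negative hash is negative. The standalone Java below illustrates just that arithmetic; the hash value itself would come from the materialized expression, so here it is simply a parameter.

// Standalone illustration of the bucket arithmetic returned by the generated eval block.
final class PartitionBucketSketch {

  static int bucket(int hash, int outgoingBatchCount) {
    // Java's % keeps the sign of the dividend, so a negative hash needs Math.abs,
    // mirroring the staticInvoke("abs").arg(bucket) call in the code above.
    return Math.abs(hash % outgoingBatchCount);
  }

  public static void main(String[] args) {
    System.out.println(bucket(42, 4));  // 2
    System.out.println(bucket(-7, 4));  // 3, not -3
  }
}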
Use of org.apache.drill.common.expression.ErrorCollectorImpl in project drill by axbaretto.
In class JoinUtils, method addLeastRestrictiveCasts:
/**
 * Utility method used by joins to add implicit casts on one of the sides of the join condition in case the two
 * expressions have different types.
 * @param leftExpressions array of expressions from left input into the join
 * @param leftBatch left input record batch
 * @param rightExpressions array of expressions from right input into the join
 * @param rightBatch right input record batch
 * @param context fragment context
 */
public static void addLeastRestrictiveCasts(LogicalExpression[] leftExpressions, VectorAccessible leftBatch,
    LogicalExpression[] rightExpressions, VectorAccessible rightBatch, FragmentContext context) {
  assert rightExpressions.length == leftExpressions.length;

  for (int i = 0; i < rightExpressions.length; i++) {
    LogicalExpression rightExpression = rightExpressions[i];
    LogicalExpression leftExpression = leftExpressions[i];
    TypeProtos.MinorType rightType = rightExpression.getMajorType().getMinorType();
    TypeProtos.MinorType leftType = leftExpression.getMajorType().getMinorType();

    if (rightType == TypeProtos.MinorType.UNION || leftType == TypeProtos.MinorType.UNION) {
      continue;
    }
    if (rightType != leftType) {
      // currently we only support implicit casts if the input types are numeric or varchar/varbinary
      if (!allowImplicitCast(rightType, leftType)) {
        throw new DrillRuntimeException(String.format("Join only supports implicit casts between "
            + "1. Numeric data\n 2. Varchar, Varbinary data 3. Date, Timestamp data "
            + "Left type: %s, Right type: %s. Add explicit casts to avoid this error", leftType, rightType));
      }

      // We need to add a cast to one of the expressions
      List<TypeProtos.MinorType> types = new LinkedList<>();
      types.add(rightType);
      types.add(leftType);
      TypeProtos.MinorType result = TypeCastRules.getLeastRestrictiveType(types);
      ErrorCollector errorCollector = new ErrorCollectorImpl();

      if (result == null) {
        throw new DrillRuntimeException(String.format("Join conditions cannot be compared failing left "
            + "expression: %s failing right expression: %s",
            leftExpression.getMajorType().toString(), rightExpression.getMajorType().toString()));
      } else if (result != rightType) {
        // Add a cast expression on top of the right expression
        LogicalExpression castExpr = ExpressionTreeMaterializer.addCastExpression(rightExpression, leftExpression.getMajorType(), context.getFunctionRegistry(), errorCollector);
        // Store the newly casted expression
        rightExpressions[i] = ExpressionTreeMaterializer.materialize(castExpr, rightBatch, errorCollector, context.getFunctionRegistry());
      } else if (result != leftType) {
        // Add a cast expression on top of the left expression
        LogicalExpression castExpr = ExpressionTreeMaterializer.addCastExpression(leftExpression, rightExpression.getMajorType(), context.getFunctionRegistry(), errorCollector);
        // Store the newly casted expression
        leftExpressions[i] = ExpressionTreeMaterializer.materialize(castExpr, leftBatch, errorCollector, context.getFunctionRegistry());
      }
    }
  }
}
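
The core decision in addLeastRestrictiveCasts is made by TypeCastRules.getLeastRestrictiveType: pick the wider of the two types and cast the narrower side to it. The self-contained sketch below illustrates that idea with a toy type enum and precedence list; it is purely illustrative and does not reproduce Drill's actual cast rules.

import java.util.Arrays;
import java.util.List;

// Toy illustration of "least restrictive type" resolution between two join-key types.
// The Kind enum and precedence ordering are invented for this sketch; Drill's real rules
// live in TypeCastRules.getLeastRestrictiveType and cover many more types.
final class LeastRestrictiveSketch {

  enum Kind { INT, BIGINT, FLOAT8, VARCHAR }

  // Later entries are wider (less restrictive) among the numeric kinds.
  private static final List<Kind> NUMERIC_PRECEDENCE = Arrays.asList(Kind.INT, Kind.BIGINT, Kind.FLOAT8);

  // Returns the wider of the two kinds, or null when this sketch cannot reconcile them
  // (the operator code above turns null into a DrillRuntimeException).
  static Kind leastRestrictive(Kind left, Kind right) {
    if (left == right) {
      return left;
    }
    int l = NUMERIC_PRECEDENCE.indexOf(left);
    int r = NUMERIC_PRECEDENCE.indexOf(right);
    if (l < 0 || r < 0) {
      return null;
    }
    return l > r ? left : right;
  }

  public static void main(String[] args) {
    System.out.println(leastRestrictive(Kind.INT, Kind.BIGINT));   // BIGINT: the INT side would get the cast
    System.out.println(leastRestrictive(Kind.VARCHAR, Kind.INT));  // null: no implicit reconciliation here
  }
}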