Use of org.apache.drill.common.expression.ErrorCollectorImpl in project drill by apache.
The class HashAggBatch, method createAggregatorInternal:
protected HashAggregator createAggregatorInternal() {
  CodeGenerator<HashAggregator> top = CodeGenerator.get(HashAggregator.TEMPLATE_DEFINITION, context.getOptions());
  ClassGenerator<HashAggregator> cg = top.getRoot();
  ClassGenerator<HashAggregator> cgInner = cg.getInnerGenerator("BatchHolder");
  top.plainJavaCapable(true);
  // Uncomment the following line to allow debugging of the template code
  // top.saveCodeForDebugging(true);
  container.clear();
  numGroupByExprs = (getKeyExpressions() != null) ? getKeyExpressions().size() : 0;
  numAggrExprs = (getValueExpressions() != null) ? getValueExpressions().size() : 0;
  aggrExprs = new LogicalExpression[numAggrExprs];
  groupByOutFieldIds = new TypedFieldId[numGroupByExprs];
  aggrOutFieldIds = new TypedFieldId[numAggrExprs];
  ErrorCollector collector = new ErrorCollectorImpl();
  for (int i = 0; i < numGroupByExprs; i++) {
    NamedExpression ne = getKeyExpressions().get(i);
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
    if (expr == null) {
      continue;
    }
    final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsNamePart().getName(), expr.getMajorType());
    ValueVector vv = TypeHelper.getNewVector(outputField, oContext.getAllocator());
    // add this group-by vector to the output container
    groupByOutFieldIds[i] = container.add(vv);
    columnMapping.put(outputField.getName(), ne.getExpr().toString().replace('`', ' ').trim());
  }
  // each of SUM, MAX and MIN gets an extra bigint column
  int extraNonNullColumns = 0;
  for (int i = 0; i < numAggrExprs; i++) {
    NamedExpression ne = getValueExpressions().get(i);
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
    if (expr instanceof IfExpression) {
      throw UserException.unsupportedError(new UnsupportedOperationException("Union type not supported in aggregate functions")).build(logger);
    }
    collector.reportErrors(logger);
    if (expr == null) {
      continue;
    }
    // Populate the complex writers for complex exprs
    if (expr instanceof DrillFuncHolderExpr && ((DrillFuncHolderExpr) expr).getHolder().isComplexWriterFuncHolder()) {
      if (complexWriters == null) {
        complexWriters = new ArrayList<>();
      } else {
        complexWriters.clear();
      }
      // The reference name will be passed to ComplexWriter, used as the name of the output vector from the writer.
      ((DrillFuncHolderExpr) expr).setFieldReference(ne.getRef());
      MaterializedField field = MaterializedField.create(ne.getRef().getAsNamePart().getName(), UntypedNullHolder.TYPE);
      container.add(new UntypedNullVector(field, container.getAllocator()));
      aggrExprs[i] = expr;
    } else {
      MaterializedField outputField = MaterializedField.create(ne.getRef().getAsNamePart().getName(), expr.getMajorType());
      ValueVector vv = TypeHelper.getNewVector(outputField, oContext.getAllocator());
      aggrOutFieldIds[i] = container.add(vv);
      aggrExprs[i] = new ValueVectorWriteExpression(aggrOutFieldIds[i], expr, true);
      if (expr instanceof FunctionHolderExpression) {
        String funcName = ((FunctionHolderExpression) expr).getName();
        if (funcName.equals("sum") || funcName.equals("max") || funcName.equals("min")) {
          extraNonNullColumns++;
        }
        List<LogicalExpression> args = ((FunctionCall) ne.getExpr()).args();
        if (!args.isEmpty()) {
          if (args.get(0) instanceof SchemaPath) {
            columnMapping.put(outputField.getName(), ((SchemaPath) args.get(0)).getAsNamePart().getName());
          } else if (args.get(0) instanceof FunctionCall) {
            FunctionCall functionCall = (FunctionCall) args.get(0);
            if (functionCall.arg(0) instanceof SchemaPath) {
              columnMapping.put(outputField.getName(), ((SchemaPath) functionCall.arg(0)).getAsNamePart().getName());
            }
          }
        }
      } else {
        columnMapping.put(outputField.getName(), ne.getRef().getAsNamePart().getName());
      }
    }
  }
  setupUpdateAggrValues(cgInner);
  setupGetIndex(cg);
  cg.getBlock("resetValues")._return(JExpr.TRUE);
  container.buildSchema(SelectionVectorMode.NONE);
  HashAggregator agg = context.getImplementationClass(top);
  HashTableConfig htConfig = // TODO - fix the validator on this option
      new HashTableConfig((int) context.getOptions().getOption(ExecConstants.MIN_HASH_TABLE_SIZE),
          HashTable.DEFAULT_LOAD_FACTOR, getKeyExpressions(), null, /* no probe exprs */ comparators);
  agg.setup(popConfig, htConfig, context, oContext, incoming, this, aggrExprs, cgInner.getWorkspaceTypes(), cgInner, groupByOutFieldIds, this.container, extraNonNullColumns * 8);
  return agg;
}
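The ErrorCollectorImpl usage in this method reduces to a small, repeated pattern: one collector is created up front, every key and aggregate expression is materialized against the incoming batch with that collector, and reportErrors surfaces whatever was recorded. A minimal sketch of that pattern follows; incoming, context, and logger stand in for the operator fields used above, and someNamedExpression is a hypothetical placeholder for one entry of the key or value expression lists.

  // One collector is shared by every materialize call in the operator.
  ErrorCollector collector = new ErrorCollectorImpl();
  // Materialize the logical expression against the incoming batch's schema,
  // recording any resolution or type errors in the collector.
  LogicalExpression materialized = ExpressionTreeMaterializer.materialize(
      someNamedExpression.getExpr(), incoming, collector, context.getFunctionRegistry());
  // Surface any collected errors; this raises an exception when materialization failed.
  collector.reportErrors(logger);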
Use of org.apache.drill.common.expression.ErrorCollectorImpl in project drill by apache.
The class PartitionSenderRootExec, method createClassInstances:
private List<Partitioner> createClassInstances(int actualPartitions) {
  // set up the partitioning function
  final LogicalExpression expr = operator.getExpr();
  final ErrorCollector collector = new ErrorCollectorImpl();
  final ClassGenerator<Partitioner> cg;
  cg = CodeGenerator.getRoot(Partitioner.TEMPLATE_DEFINITION, context.getOptions());
  cg.getCodeGenerator().plainJavaCapable(true);
  // Uncomment this line to debug the generated code.
  // cg.getCodeGenerator().saveCodeForDebugging(true);
  ClassGenerator<Partitioner> cgInner = cg.getInnerGenerator("OutgoingRecordBatch");
  final LogicalExpression materializedExpr = ExpressionTreeMaterializer.materialize(expr, incoming, collector, context.getFunctionRegistry());
  collector.reportErrors(logger);
  // generate code to copy from an incoming value vector to the destination partition's outgoing value vector
  JExpression bucket = JExpr.direct("bucket");
  // generate the evaluate expression that determines the hash
  ClassGenerator.HoldingContainer exprHolder = cg.addExpr(materializedExpr);
  cg.getEvalBlock().decl(JType.parse(cg.getModel(), "int"), "bucket", exprHolder.getValue().mod(JExpr.lit(outGoingBatchCount)));
  cg.getEvalBlock()._return(cg.getModel().ref(Math.class).staticInvoke("abs").arg(bucket));
  CopyUtil.generateCopies(cgInner, incoming, incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE);
  // compile and set up the generated code
  List<Partitioner> subPartitioners = context.getImplementationClass(cg, actualPartitions);
  final int divisor = Math.max(1, outGoingBatchCount / actualPartitions);
  final int longTail = outGoingBatchCount % actualPartitions;
  int startIndex = 0;
  int endIndex = 0;
  boolean success = false;
  try {
    for (int i = 0; i < actualPartitions; i++) {
      startIndex = endIndex;
      endIndex = (i < actualPartitions - 1) ? startIndex + divisor : outGoingBatchCount;
      if (i < longTail) {
        endIndex++;
      }
      final OperatorStats partitionStats = new OperatorStats(stats, true);
      subPartitioners.get(i).setup(context, incoming, popConfig, partitionStats, oContext, cgInner, startIndex, endIndex);
    }
    partitioner = new PartitionerDecorator(subPartitioners, stats, context);
    for (int index = 0; index < terminations.size(); index++) {
      partitioner.getOutgoingBatches(terminations.buffer[index]).terminate();
    }
    terminations.clear();
    success = true;
  } catch (SchemaChangeException e) {
    throw AbstractRecordBatch.schemaChangeException(e, "Partition Sender", logger);
  } finally {
    if (!success) {
      for (Partitioner p : subPartitioners) {
        p.clear();
      }
    }
  }
  return subPartitioners;
}
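The two getEvalBlock() calls above emit the bucket computation for each incoming record: declare an int bucket as the materialized hash expression modulo the outgoing batch count, then return its absolute value. Conceptually, the generated evaluation behaves like this hand-written sketch; the method name and the hash parameter are illustrative placeholders, not the generated class's actual API.

  // Roughly what the generated eval block amounts to per record:
  int evaluateBucket(int hash, int outGoingBatchCount) {
    int bucket = hash % outGoingBatchCount;  // decl(int, "bucket", exprHolder.getValue().mod(lit(outGoingBatchCount)))
    return Math.abs(bucket);                 // _return(Math.abs(bucket))
  }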
Use of org.apache.drill.common.expression.ErrorCollectorImpl in project drill by apache.
The class PruneScanRule, method materializePruneExpr:
protected LogicalExpression materializePruneExpr(RexNode pruneCondition, PlannerSettings settings, RelNode scanRel, VectorContainer container) {
  // materialize the expression
  logger.debug("Attempting to prune {}", pruneCondition);
  final LogicalExpression expr = DrillOptiq.toDrill(new DrillParseContext(settings), scanRel, pruneCondition);
  final ErrorCollectorImpl errors = new ErrorCollectorImpl();
  LogicalExpression materializedExpr = ExpressionTreeMaterializer.materialize(expr, container, errors, optimizerContext.getFunctionRegistry());
  // Convert a REQUIRED expression to its nullable form so its type is the same as the type of the output vector.
  if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.REQUIRED) {
    materializedExpr = ExpressionTreeMaterializer.convertToNullableType(materializedExpr, materializedExpr.getMajorType().getMinorType(), optimizerContext.getFunctionRegistry(), errors);
  }
  if (errors.getErrorCount() != 0) {
    logger.warn("Failure while materializing expression [{}]. Errors: {}", expr, errors);
    return null;
  }
  return materializedExpr;
}
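Unlike the two operator examples, which call collector.reportErrors(logger) and fail the query when an expression cannot be materialized, this planner rule inspects errors.getErrorCount(), logs a warning, and returns null so pruning is simply skipped. A condensed sketch of the two styles seen in the snippets above; collector, expr, and logger are placeholders for the surrounding fields:

  // Fail fast inside an operator: surface collected errors immediately.
  collector.reportErrors(logger);

  // Recover inside a planner rule: log the problem and fall back without throwing.
  if (collector.getErrorCount() != 0) {
    logger.warn("Failure while materializing expression [{}]. Errors: {}", expr, collector);
    return null;  // no materialized prune expression available
  }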