Use of org.apache.drill.common.expression.ErrorCollectorImpl in project Drill by Apache.
The class FlattenRecordBatch, method setupNewSchema.
@Override
protected boolean setupNewSchema() {
  allocationVectors = new ArrayList<>();
  container.clear();
  List<NamedExpression> exprs = getExpressionList();
  ErrorCollector collector = new ErrorCollectorImpl();
  List<TransferPair> transfers = new ArrayList<>();
  ClassGenerator<Flattener> cg = CodeGenerator.getRoot(Flattener.TEMPLATE_DEFINITION, context.getOptions());
  cg.getCodeGenerator().plainJavaCapable(true);
  IntHashSet transferFieldIds = new IntHashSet();
  NamedExpression flattenExpr = new NamedExpression(popConfig.getColumn(), new FieldReference(popConfig.getColumn()));
  ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) ExpressionTreeMaterializer.materialize(flattenExpr.getExpr(), incoming, collector, context.getFunctionRegistry(), true);
  FieldReference fieldReference = flattenExpr.getRef();
  TransferPair transferPair = getFlattenFieldTransferPair(fieldReference);
  if (transferPair != null) {
    ValueVector flattenVector = transferPair.getTo();
    // checks that the list contains only the default ValueVector and, if so, replaces the resulting ValueVector with an INT-typed ValueVector
    if (exprs.size() == 0 && flattenVector.getField().getType().equals(Types.LATE_BIND_TYPE)) {
      MaterializedField outputField = MaterializedField.create(fieldReference.getAsNamePart().getName(), Types.OPTIONAL_INT);
      ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
      container.add(vector);
    } else {
      transfers.add(transferPair);
      container.add(flattenVector);
      transferFieldIds.add(vectorRead.getFieldId().getFieldIds()[0]);
    }
  }
  logger.debug("Added transfer for project expression.");
  ClassifierResult result = new ClassifierResult();
  for (NamedExpression namedExpression : exprs) {
    result.clear();
    String outputName = getRef(namedExpression).getRootSegment().getPath();
    if (result != null && result.outputNames != null && result.outputNames.size() > 0) {
      for (int j = 0; j < result.outputNames.size(); j++) {
        if (!result.outputNames.get(j).equals(EMPTY_STRING)) {
          outputName = result.outputNames.get(j);
          break;
        }
      }
    }
    LogicalExpression expr = ExpressionTreeMaterializer.materialize(namedExpression.getExpr(), incoming, collector, context.getFunctionRegistry(), true);
    collector.reportErrors(logger);
    if (expr instanceof DrillFuncHolderExpr && ((DrillFuncHolderExpr) expr).getHolder().isComplexWriterFuncHolder()) {
      // Lazy initialization of the list of complex writers, if not done yet.
      if (complexWriters == null) {
        complexWriters = new ArrayList<>();
      }
      // The reference name will be passed to ComplexWriter, used as the name of the output vector from the writer.
      ((DrillFuncHolderExpr) expr).setFieldReference(namedExpression.getRef());
      cg.addExpr(expr);
    } else {
      // need to do evaluation.
      MaterializedField outputField;
      if (expr instanceof ValueVectorReadExpression) {
        TypedFieldId id = ValueVectorReadExpression.class.cast(expr).getFieldId();
        ValueVector incomingVector = incoming.getValueAccessorById(id.getIntermediateClass(), id.getFieldIds()).getValueVector();
        // the incoming vector may be null when the first batch is empty.
        if (incomingVector != null) {
          outputField = incomingVector.getField().clone();
        } else {
          outputField = MaterializedField.create(outputName, expr.getMajorType());
        }
      } else {
        outputField = MaterializedField.create(outputName, expr.getMajorType());
      }
      ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
      allocationVectors.add(vector);
      TypedFieldId fid = container.add(vector);
      ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
      cg.addExpr(write);
      logger.debug("Added eval for project expression.");
    }
  }
  cg.rotateBlock();
  cg.getEvalBlock()._return(JExpr.TRUE);
  container.buildSchema(SelectionVectorMode.NONE);
  try {
    flattener = context.getImplementationClass(cg.getCodeGenerator());
    flattener.setup(context, incoming, this, transfers);
  } catch (SchemaChangeException e) {
    throw schemaChangeException(e, logger);
  }
  return true;
}
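A distilled sketch of the ErrorCollector pattern used above: materialize an expression against the incoming batch with a collector, then hand any recorded problems to reportErrors. The names namedExpression, incoming, context, and logger stand in for the operator's own fields and locals, so treat this as a fragment rather than runnable code.

ErrorCollector collector = new ErrorCollectorImpl();
// Materialize the logical expression against the incoming batch; problems are
// recorded on the collector instead of being thrown immediately.
LogicalExpression materialized = ExpressionTreeMaterializer.materialize(
    namedExpression.getExpr(), incoming, collector, context.getFunctionRegistry(), true);
// Surface whatever the collector accumulated during materialization.
collector.reportErrors(logger);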
Use of org.apache.drill.common.expression.ErrorCollectorImpl in project Drill by Apache.
The class StreamingAggBatch, method createAggregatorInternal.
protected StreamingAggregator createAggregatorInternal() {
  ClassGenerator<StreamingAggregator> cg = CodeGenerator.getRoot(StreamingAggTemplate.TEMPLATE_DEFINITION, context.getOptions());
  // Streaming agg no longer plain Java capable. Stats generates code
  // that fails when compiled normally.
  //   cannot override resetValues() in org.apache.drill.exec.physical.impl.aggregate.StreamingAggTemplate
  //     public boolean resetValues()
  //     ^
  //   overridden method does not throw org.apache.drill.exec.exception.SchemaChangeException (compiler.err.override.meth.doesnt.throw)
  // cg.getCodeGenerator().plainJavaCapable(true);
  // Uncomment out this line to debug the generated code.
  // cg.getCodeGenerator().saveCodeForDebugging(true);
  container.clear();
  LogicalExpression[] keyExprs = new LogicalExpression[getKeyExpressions().size()];
  LogicalExpression[] valueExprs = new LogicalExpression[getValueExpressions().size()];
  TypedFieldId[] keyOutputIds = new TypedFieldId[getKeyExpressions().size()];
  ErrorCollector collector = new ErrorCollectorImpl();
  for (int i = 0; i < keyExprs.length; i++) {
    NamedExpression ne = getKeyExpressions().get(i);
    LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
    if (expr == null) {
      continue;
    }
    keyExprs[i] = expr;
    MaterializedField outputField = MaterializedField.create(ne.getRef().getLastSegment().getNameSegment().getPath(), expr.getMajorType());
    container.addOrGet(outputField);
    keyOutputIds[i] = container.getValueVectorId(ne.getRef());
  }
  for (int i = 0; i < valueExprs.length; i++) {
    NamedExpression ne = getValueExpressions().get(i);
    LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry(), true, false);
    if (expr instanceof IfExpression) {
      throw UserException.unsupportedError(new UnsupportedOperationException("Union type not supported in aggregate functions")).build(logger);
    }
    if (expr == null) {
      continue;
    }
    // Populate the complex writers for complex exprs
    if (expr instanceof DrillFuncHolderExpr && ((DrillFuncHolderExpr) expr).getHolder().isComplexWriterFuncHolder()) {
      // Lazy initialization of the list of complex writers, if not done yet.
      if (complexWriters == null) {
        complexWriters = Lists.newArrayList();
      } else {
        complexWriters.clear();
      }
      // The reference name will be passed to ComplexWriter, used as the name of the output vector from the writer.
      ((DrillFuncHolderExpr) expr).setFieldReference(ne.getRef());
      MaterializedField field = MaterializedField.create(ne.getRef().getAsNamePart().getName(), UntypedNullHolder.TYPE);
      container.add(new UntypedNullVector(field, container.getAllocator()));
      valueExprs[i] = expr;
    } else {
      MaterializedField outputField = MaterializedField.create(ne.getRef().getLastSegment().getNameSegment().getPath(), expr.getMajorType());
      container.addOrGet(outputField);
      TypedFieldId id = container.getValueVectorId(ne.getRef());
      valueExprs[i] = new ValueVectorWriteExpression(id, expr, true);
    }
  }
  collector.reportErrors(logger);
  setupIsSame(cg, keyExprs);
  setupIsSameApart(cg, keyExprs);
  addRecordValues(cg, valueExprs);
  outputRecordKeys(cg, keyOutputIds, keyExprs);
  outputRecordKeysPrev(cg, keyOutputIds, keyExprs);
  cg.getBlock("resetValues")._return(JExpr.TRUE);
  getIndex(cg);
  container.buildSchema(SelectionVectorMode.NONE);
  StreamingAggregator agg = context.getImplementationClass(cg);
  try {
    agg.setup(oContext, incoming, this, maxOutputRowCount);
  } catch (SchemaChangeException e) {
    throw schemaChangeException(e, logger);
  }
  allocateComplexWriters();
  return agg;
}
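Unlike the previous example, this method materializes every key and value expression against a single collector and calls reportErrors only once, after both loops. A hedged sketch of that accumulate-then-report shape, reusing the snippet's names as placeholders:

ErrorCollector collector = new ErrorCollectorImpl();
for (NamedExpression ne : getValueExpressions()) {
  // Each materialization records its problems on the same collector; nothing is reported yet.
  LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
  // ... the operator keeps expr (valueExprs[i] above) and builds its output vectors ...
}
// A single report at the end covers every expression materialized above.
collector.reportErrors(logger);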
Use of org.apache.drill.common.expression.ErrorCollectorImpl in project Drill by Apache.
The class MergingRecordBatch, method generateComparisons.
private void generateComparisons(final ClassGenerator<?> g, final VectorAccessible batch) throws SchemaChangeException {
  g.setMappingSet(MAIN_MAPPING);
  for (final Ordering od : popConfig.getOrderings()) {
    // first, we rewrite the evaluation stack for each side of the comparison.
    final ErrorCollector collector = new ErrorCollectorImpl();
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
    if (collector.hasErrors()) {
      throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
    }
    g.setMappingSet(LEFT_MAPPING);
    final HoldingContainer left = g.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
    g.setMappingSet(RIGHT_MAPPING);
    final HoldingContainer right = g.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
    g.setMappingSet(MAIN_MAPPING);
    // next we wrap the two comparison sides and add the expression block for the comparison.
    final LogicalExpression fh = FunctionGenerationHelper.getOrderingComparator(od.nullsSortHigh(), left, right, context.getFunctionRegistry());
    final HoldingContainer out = g.addExpr(fh, ClassGenerator.BlkCreateMode.FALSE);
    final JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
    if (od.getDirection() == Direction.ASCENDING) {
      jc._then()._return(out.getValue());
    } else {
      jc._then()._return(out.getValue().minus());
    }
  }
  g.getEvalBlock()._return(JExpr.lit(0));
}
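This method shows the second error-handling style on this page: rather than calling collector.reportErrors(logger), it inspects the collector directly and raises a SchemaChangeException built from toErrorString(). A short, hedged comparison of the two styles, with placeholder names taken from the snippets:

// Style 1 (FlattenRecordBatch, StreamingAggBatch, OrderedPartitionRecordBatch below):
// let the collector surface its errors through the logger-aware helper.
collector.reportErrors(logger);

// Style 2 (this method): check explicitly and choose the exception type yourself.
if (collector.hasErrors()) {
  // toErrorString() renders everything the collector recorded as one message.
  throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
}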
Use of org.apache.drill.common.expression.ErrorCollectorImpl in project Drill by Apache.
The class OrderedPartitionRecordBatch, method setupNewSchema.
/**
 * Sets up projection that will transfer all of the columns in batch, and also
 * populate the partition column based on which partition a record falls into
 * in the partition table
 *
 * @param batch
 */
protected void setupNewSchema(VectorAccessible batch) {
  container.clear();
  ErrorCollector collector = new ErrorCollectorImpl();
  List<TransferPair> transfers = Lists.newArrayList();
  ClassGenerator<OrderedPartitionProjector> cg = CodeGenerator.getRoot(OrderedPartitionProjector.TEMPLATE_DEFINITION, context.getOptions());
  for (VectorWrapper<?> vw : batch) {
    TransferPair tp = vw.getValueVector().getTransferPair(oContext.getAllocator());
    transfers.add(tp);
    container.add(tp.getTo());
  }
  cg.setMappingSet(mainMapping);
  int count = 0;
  for (Ordering od : popConfig.getOrderings()) {
    LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
    collector.reportErrors(logger);
    cg.setMappingSet(incomingMapping);
    ClassGenerator.HoldingContainer left = cg.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
    cg.setMappingSet(partitionMapping);
    TypedFieldId fieldId = new TypedFieldId.Builder().finalType(expr.getMajorType()).addId(count++).build();
    ClassGenerator.HoldingContainer right = cg.addExpr(new ValueVectorReadExpression(fieldId), ClassGenerator.BlkCreateMode.FALSE);
    cg.setMappingSet(mainMapping);
    // next we wrap the two comparison sides and add the expression block for the comparison.
    LogicalExpression fh = FunctionGenerationHelper.getOrderingComparator(od.nullsSortHigh(), left, right, context.getFunctionRegistry());
    ClassGenerator.HoldingContainer out = cg.addExpr(fh, ClassGenerator.BlkCreateMode.FALSE);
    JConditional jc = cg.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
    if (od.getDirection() == Direction.ASCENDING) {
      jc._then()._return(out.getValue());
    } else {
      jc._then()._return(out.getValue().minus());
    }
  }
  cg.getEvalBlock()._return(JExpr.lit(0));
  container.add(this.partitionKeyVector);
  container.buildSchema(batch.getSchema().getSelectionVectorMode());
  projector = context.getImplementationClass(cg);
  try {
    projector.setup(context, batch, this, transfers, partitionVectors, partitions, popConfig.getRef());
  } catch (SchemaChangeException e) {
    throw UserException.schemaChangeError(e).addContext("Unexpected schema change in the Ordered Partitioner").build(logger);
  }
}
Use of org.apache.drill.common.expression.ErrorCollectorImpl in project Drill by Apache.
The class ExpressionTest, method getExpressionCode.
private String getExpressionCode(String expression, RecordBatch batch) throws Exception {
  final LogicalExpression expr = parseExpr(expression);
  final ErrorCollector error = new ErrorCollectorImpl();
  final LogicalExpression materializedExpr = ExpressionTreeMaterializer.materialize(expr, batch, error, registry);
  if (error.getErrorCount() != 0) {
    logger.error("Failure while materializing expression [{}]. Errors: {}", expression, error);
    assertEquals(0, error.getErrorCount());
  }
  FunctionImplementationRegistry funcReg = new FunctionImplementationRegistry(DrillConfig.create());
  final ClassGenerator<Projector> cg = CodeGenerator.get(Projector.TEMPLATE_DEFINITION, null).getRoot();
  TypedFieldId fieldId = new TypedFieldId.Builder().finalType(materializedExpr.getMajorType()).addId(-1).build();
  cg.addExpr(new ValueVectorWriteExpression(fieldId, materializedExpr));
  return cg.getCodeGenerator().generateAndGet();
}
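The test illustrates a third style: it counts the collected errors with getErrorCount(), logs the collector, and turns a non-zero count into a JUnit assertion failure, so a bad expression fails the test rather than the query. A minimal sketch of just that check, reusing the names from the method above as placeholders:

// Style 3 (tests): make the error count part of the assertion so the test
// report shows how many materialization errors were collected.
if (error.getErrorCount() != 0) {
  logger.error("Failure while materializing expression [{}]. Errors: {}", expression, error);
}
assertEquals(0, error.getErrorCount());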