use of org.apache.drill.common.expression.LogicalExpression in project drill by apache.
the class ParquetFilterBuilder method visitFunctionHolderExpression.
@Override
public LogicalExpression visitFunctionHolderExpression(FunctionHolderExpression funcHolderExpr, Set<LogicalExpression> value) throws RuntimeException {
  FuncHolder holder = funcHolderExpr.getHolder();
  if (!(holder instanceof DrillSimpleFuncHolder)) {
    return null;
  }
  if (value.contains(funcHolderExpr)) {
    ValueHolder result;
    try {
      result = InterpreterEvaluator.evaluateConstantExpr(udfUtilities, funcHolderExpr);
    } catch (Exception e) {
      logger.warn("Error in evaluating function of {}", funcHolderExpr.getName());
      return null;
    }
    logger.debug("Reduce a constant function expression into a value expression");
    return getValueExpressionFromConst(result, funcHolderExpr.getMajorType().getMinorType());
  }
  final String funcName = ((DrillSimpleFuncHolder) holder).getRegisteredNames()[0];
  if (isCompareFunction(funcName)) {
    return handleCompareFunction(funcHolderExpr, value);
  }
  if (CastFunctions.isCastFunction(funcName)) {
    List<LogicalExpression> newArgs = new ArrayList<>();
    for (LogicalExpression arg : funcHolderExpr.args) {
      final LogicalExpression newArg = arg.accept(this, value);
      if (newArg == null) {
        return null;
      }
      newArgs.add(newArg);
    }
    return funcHolderExpr.copy(newArgs);
  } else {
    return null;
  }
}
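A minimal sketch of how such a visitor is usually driven: the materialized filter expression is walked with accept(), passing the set of constant sub-expressions as the visitor payload. The constant-set helper and the constructor argument shown here are assumptions for illustration, not necessarily the exact ParquetFilterBuilder entry point.

// Hypothetical driver code; the real ParquetFilterBuilder wiring may differ.
Set<LogicalExpression> constantBoundaries =
    ConstantExpressionIdentifier.getConstantExpressionSet(materializedFilter); // assumed helper
ParquetFilterBuilder builder = new ParquetFilterBuilder(udfUtilities);         // assumed constructor
LogicalExpression parquetPredicate = materializedFilter.accept(builder, constantBoundaries);
if (parquetPredicate == null) {
  // The filter could not be converted to a Parquet predicate; read all row groups.
}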
use of org.apache.drill.common.expression.LogicalExpression in project drill by apache.
the class TestParquetFilterPushDown method testParquetRowGroupFilterEval.
private void testParquetRowGroupFilterEval(final ParquetMetadata footer, final String exprStr, boolean canDropExpected) throws Exception {
  final LogicalExpression filterExpr = parseExpr(exprStr);
  testParquetRowGroupFilterEval(footer, 0, filterExpr, canDropExpected);
}
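A hedged example of how this helper overload might be exercised from a test case; the footer-loading call, the file path, and the column name are assumptions for illustration, not taken from the test class.

// Illustrative usage only; getParquetMetaData, testFilePath and the column intCol are assumed.
final ParquetMetadata footer = getParquetMetaData(testFilePath);
testParquetRowGroupFilterEval(footer, "intCol = 100", true);  // predicate falls outside the row group's min/max, so it can be dropped
testParquetRowGroupFilterEval(footer, "intCol > 0", false);   // predicate overlaps the range, so the row group must be kept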
use of org.apache.drill.common.expression.LogicalExpression in project drill by apache.
the class OperatorCodeGenerator method generateComparisons.
protected void generateComparisons(ClassGenerator<?> g, VectorAccessible batch) {
  g.setMappingSet(MAIN_MAPPING);
  for (Ordering od : popConfig.getOrderings()) {
    // first, we rewrite the evaluation stack for each side of the comparison.
    ErrorCollector collector = new ErrorCollectorImpl();
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
    if (collector.hasErrors()) {
      throw UserException.unsupportedError().message("Failure while materializing expression. " + collector.toErrorString()).build(logger);
    }
    g.setMappingSet(LEFT_MAPPING);
    HoldingContainer left = g.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
    g.setMappingSet(RIGHT_MAPPING);
    HoldingContainer right = g.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
    g.setMappingSet(MAIN_MAPPING);
    // next we wrap the two comparison sides and add the expression block for the comparison.
    LogicalExpression fh = FunctionGenerationHelper.getOrderingComparator(od.nullsSortHigh(), left, right, context.getFunctionRegistry());
    HoldingContainer out = g.addExpr(fh, ClassGenerator.BlkCreateMode.FALSE);
    JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
    if (od.getDirection() == Direction.ASCENDING) {
      jc._then()._return(out.getValue());
    } else {
      jc._then()._return(out.getValue().minus());
    }
    g.rotateBlock();
  }
  g.rotateBlock();
  g.getEvalBlock()._return(JExpr.lit(0));
}
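Conceptually, the loop above emits one comparison block per ordering key, and the generated comparator is equivalent to the hand-written sketch below. The compareKey1/compareKey2 helpers are placeholders rather than generated names, and two orderings are assumed purely for illustration.

// Hand-written equivalent of the generated comparator body (illustrative only).
int doEval(int leftIndex, int rightIndex) {
  int cmp;
  cmp = compareKey1(leftIndex, rightIndex); // first Ordering, e.g. ASCENDING
  if (cmp != 0) {
    return cmp;
  }
  cmp = compareKey2(leftIndex, rightIndex); // second Ordering, e.g. DESCENDING
  if (cmp != 0) {
    return -cmp;                            // a descending key negates the comparator result
  }
  return 0;                                 // all keys compare equal
}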
use of org.apache.drill.common.expression.LogicalExpression in project drill by apache.
the class OperatorCodeGenerator method createNewMSorter.
private MSorter createNewMSorter(List<Ordering> orderings, VectorAccessible batch, MappingSet mainMapping, MappingSet leftMapping, MappingSet rightMapping) {
  CodeGenerator<MSorter> cg = CodeGenerator.get(MSorter.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
  cg.plainJavaCapable(true);
  // Uncomment this line to debug the generated code.
  // cg.saveCodeForDebugging(true);
  ClassGenerator<MSorter> g = cg.getRoot();
  g.setMappingSet(mainMapping);
  for (Ordering od : orderings) {
    // first, we rewrite the evaluation stack for each side of the comparison.
    ErrorCollector collector = new ErrorCollectorImpl();
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
    if (collector.hasErrors()) {
      throw UserException.unsupportedError().message("Failure while materializing expression. " + collector.toErrorString()).build(logger);
    }
    g.setMappingSet(leftMapping);
    HoldingContainer left = g.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
    g.setMappingSet(rightMapping);
    HoldingContainer right = g.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
    g.setMappingSet(mainMapping);
    // next we wrap the two comparison sides and add the expression block for the comparison.
    LogicalExpression fh = FunctionGenerationHelper.getOrderingComparator(od.nullsSortHigh(), left, right, context.getFunctionRegistry());
    HoldingContainer out = g.addExpr(fh, ClassGenerator.BlkCreateMode.FALSE);
    JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
    if (od.getDirection() == Direction.ASCENDING) {
      jc._then()._return(out.getValue());
    } else {
      jc._then()._return(out.getValue().minus());
    }
    g.rotateBlock();
  }
  g.rotateBlock();
  g.getEvalBlock()._return(JExpr.lit(0));
  return getInstance(cg);
}
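A hedged sketch of a possible call site for this factory method, using the same mapping-set constants referenced in generateComparisons above. The setup and sort calls are assumptions about the MSorter interface made for illustration; the actual field names and signatures in the sort operator may differ.

// Illustrative call site; field names and MSorter method signatures are assumed.
MSorter mSorter = createNewMSorter(popConfig.getOrderings(), batch,
    MAIN_MAPPING, LEFT_MAPPING, RIGHT_MAPPING);
mSorter.setup(context, allocator, sv4, hyperBatch); // assumed setup signature
mSorter.sort(hyperBatch);                           // assumed sort entry point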
use of org.apache.drill.common.expression.LogicalExpression in project drill by apache.
the class UnionAllRecordBatch method doWork.
@SuppressWarnings("resource")
private IterOutcome doWork() throws ClassTransformationException, IOException, SchemaChangeException {
  if (allocationVectors != null) {
    for (ValueVector v : allocationVectors) {
      v.clear();
    }
  }
  allocationVectors = Lists.newArrayList();
  transfers.clear();
  // If both sides of Union-All are empty
  if (unionAllInput.isBothSideEmpty()) {
    for (int i = 0; i < outputFields.size(); ++i) {
      final String colName = outputFields.get(i).getPath();
      final MajorType majorType = MajorType.newBuilder().setMinorType(MinorType.INT).setMode(DataMode.OPTIONAL).build();
      MaterializedField outputField = MaterializedField.create(colName, majorType);
      ValueVector vv = container.addOrGet(outputField, callBack);
      allocationVectors.add(vv);
    }
    container.buildSchema(BatchSchema.SelectionVectorMode.NONE);
    return IterOutcome.OK_NEW_SCHEMA;
  }
  final ClassGenerator<UnionAller> cg = CodeGenerator.getRoot(UnionAller.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
  cg.getCodeGenerator().plainJavaCapable(true);
  // Uncomment this line to debug the generated code.
  // cg.getCodeGenerator().saveCodeForDebugging(true);
  int index = 0;
  for (VectorWrapper<?> vw : current) {
    ValueVector vvIn = vw.getValueVector();
    // get the original input column names
    SchemaPath inputPath = SchemaPath.getSimplePath(vvIn.getField().getPath());
    // get the renamed column names
    SchemaPath outputPath = SchemaPath.getSimplePath(outputFields.get(index).getPath());
    final ErrorCollector collector = new ErrorCollectorImpl();
    // cast data types (MinorType or DataMode)
    if (hasSameTypeAndMode(outputFields.get(index), vw.getValueVector().getField())) {
      // Transfer column
      MajorType outputFieldType = outputFields.get(index).getType();
      MaterializedField outputField = MaterializedField.create(outputPath.getAsUnescapedPath(), outputFieldType);
      /*
        todo: Fix the if condition when DRILL-4824 is merged.
        The condition should be changed to:
        `if (outputFields.get(index).getPath().equals(inputPath.getAsUnescapedPath())) {`
        DRILL-5419 changed the condition to the correct one, but this caused a regression (DRILL-5521).
        The root cause is a missing indication of a child column in map types when it is null.
        DRILL-4824 is re-working the JSON reader implementation, including map types, and will fix this problem.
        Reverting the condition to the previous one to avoid the regression until DRILL-4824 is merged.
        Unit test - TestJsonReader.testKvgenWithUnionAll().
      */
      if (outputFields.get(index).getPath().equals(inputPath)) {
        ValueVector vvOut = container.addOrGet(outputField);
        TransferPair tp = vvIn.makeTransferPair(vvOut);
        transfers.add(tp);
      // Copy data in order to rename the column
      } else {
        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(inputPath, current, collector, context.getFunctionRegistry());
        if (collector.hasErrors()) {
          throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
        }
        ValueVector vv = container.addOrGet(outputField, callBack);
        allocationVectors.add(vv);
        TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
        ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
        cg.addExpr(write);
      }
    // Cast is necessary
    } else {
      LogicalExpression expr = ExpressionTreeMaterializer.materialize(inputPath, current, collector, context.getFunctionRegistry());
      if (collector.hasErrors()) {
        throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
      }
      // cast to the one with the least restriction
      if (vvIn.getField().getType().getMode() == DataMode.REQUIRED && outputFields.get(index).getType().getMode() != DataMode.REQUIRED) {
        expr = ExpressionTreeMaterializer.convertToNullableType(expr, vvIn.getField().getType().getMinorType(), context.getFunctionRegistry(), collector);
        if (collector.hasErrors()) {
          throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
        }
      }
      // Insert a cast before the Union operation
      if (vvIn.getField().getType().getMinorType() != outputFields.get(index).getType().getMinorType()) {
        expr = ExpressionTreeMaterializer.addCastExpression(expr, outputFields.get(index).getType(), context.getFunctionRegistry(), collector);
        if (collector.hasErrors()) {
          throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
        }
      }
      final MaterializedField outputField = MaterializedField.create(outputPath.getAsUnescapedPath(), expr.getMajorType());
      ValueVector vector = container.addOrGet(outputField, callBack);
      allocationVectors.add(vector);
      TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
      boolean useSetSafe = !(vector instanceof FixedWidthVector);
      ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, useSetSafe);
      cg.addExpr(write);
    }
    ++index;
  }
  unionall = context.getImplementationClass(cg.getCodeGenerator());
  unionall.setup(context, current, this, transfers);
  if (!schemaAvailable) {
    container.buildSchema(BatchSchema.SelectionVectorMode.NONE);
    schemaAvailable = true;
  }
  if (!doAlloc()) {
    return IterOutcome.OUT_OF_MEMORY;
  }
  recordCount = unionall.unionRecords(0, current.getRecordCount(), 0);
  setValueCount(recordCount);
  return IterOutcome.OK;
}
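The casting branch above applies two independent adjustments before writing to the output vector: a mode promotion (REQUIRED to OPTIONAL) and a minor-type cast. A compact restatement of that decision follows; the inputType/outputType locals and the boolean names are hypothetical, introduced only to make the two conditions explicit.

// Illustrative restatement of the cast logic above; inputType, outputType and the flags are hypothetical names.
boolean needsNullablePromotion =
    inputType.getMode() == DataMode.REQUIRED && outputType.getMode() != DataMode.REQUIRED;
boolean needsCast =
    inputType.getMinorType() != outputType.getMinorType();
if (needsNullablePromotion) {
  expr = ExpressionTreeMaterializer.convertToNullableType(expr, inputType.getMinorType(),
      context.getFunctionRegistry(), collector);
}
if (needsCast) {
  expr = ExpressionTreeMaterializer.addCastExpression(expr, outputType,
      context.getFunctionRegistry(), collector);
}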