Example 31 with SchemaPath

Use of org.apache.drill.common.expression.SchemaPath in project drill by apache.

From class CompareFunctionsProcessor, method process:

public static CompareFunctionsProcessor process(FunctionCall call, boolean nullComparatorSupported) {
    String functionName = call.getName();
    LogicalExpression nameArg = call.args.get(0);
    LogicalExpression valueArg = call.args.size() >= 2 ? call.args.get(1) : null;
    CompareFunctionsProcessor evaluator = new CompareFunctionsProcessor(functionName);
    if (valueArg != null) {
        // binary function
        if (VALUE_EXPRESSION_CLASSES.contains(nameArg.getClass())) {
            LogicalExpression swapArg = valueArg;
            valueArg = nameArg;
            nameArg = swapArg;
            evaluator.functionName = COMPARE_FUNCTIONS_TRANSPOSE_MAP.get(functionName);
        }
        evaluator.success = nameArg.accept(evaluator, valueArg);
    } else if (nullComparatorSupported && call.args.get(0) instanceof SchemaPath) {
        evaluator.success = true;
        evaluator.path = (SchemaPath) nameArg;
    }
    return evaluator;
}
Also used: LogicalExpression (org.apache.drill.common.expression.LogicalExpression), SchemaPath (org.apache.drill.common.expression.SchemaPath), QuotedString (org.apache.drill.common.expression.ValueExpressions.QuotedString)
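
When the literal ends up as the first argument (for example `5 < col`), process() swaps the two arguments and rewrites the function name through COMPARE_FUNCTIONS_TRANSPOSE_MAP so the column is always on the left. Below is a minimal, self-contained sketch of that transpose idea in plain Java; the class name, helper, and map contents are illustrative only, not Drill's actual table:

import java.util.Map;

public class TransposeSketch {

    // Illustrative stand-in for COMPARE_FUNCTIONS_TRANSPOSE_MAP: comparison
    // function names keyed to the name used after the operands are swapped.
    private static final Map<String, String> TRANSPOSE = Map.of(
        "equal", "equal",
        "not_equal", "not_equal",
        "less_than", "greater_than",
        "less_than_or_equal_to", "greater_than_or_equal_to",
        "greater_than", "less_than",
        "greater_than_or_equal_to", "less_than_or_equal_to");

    // If the literal is the first argument (e.g. 5 < col), the operands are
    // swapped and the function name is rewritten accordingly.
    static String transpose(String functionName, boolean literalOnLeft) {
        return literalOnLeft ? TRANSPOSE.get(functionName) : functionName;
    }

    public static void main(String[] args) {
        // 5 < col becomes col > 5
        System.out.println(transpose("less_than", true)); // greater_than
    }
}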

Example 32 with SchemaPath

Use of org.apache.drill.common.expression.SchemaPath in project drill by apache.

From class MaprDBJsonRecordReader, method transformColumns:

@Override
protected Collection<SchemaPath> transformColumns(Collection<SchemaPath> columns) {
    Set<SchemaPath> transformed = Sets.newLinkedHashSet();
    if (disablePushdown) {
        transformed.add(AbstractRecordReader.STAR_COLUMN);
        includeId = true;
        return transformed;
    }
    if (isStarQuery()) {
        transformed.add(AbstractRecordReader.STAR_COLUMN);
        includeId = true;
        if (isSkipQuery()) {
            // `SELECT COUNT(*)` query
            if (!disableCountOptimization) {
                projectedFields = new FieldPath[1];
                projectedFields[0] = ID_FIELD;
            }
        }
        return transformed;
    }
    Set<FieldPath> projectedFieldsSet = Sets.newTreeSet();
    for (SchemaPath column : columns) {
        if (column.getRootSegment().getPath().equalsIgnoreCase(ID_KEY)) {
            includeId = true;
            if (!disableCountOptimization) {
                projectedFieldsSet.add(ID_FIELD);
            }
        } else {
            projectedFieldsSet.add(getFieldPathForProjection(column));
        }
        transformed.add(column);
    }
    if (projectedFieldsSet.size() > 0) {
        projectedFields = projectedFieldsSet.toArray(new FieldPath[projectedFieldsSet.size()]);
    }
    if (disableCountOptimization) {
        idOnly = (projectedFields == null);
    }
    return transformed;
}
Also used: SchemaPath (org.apache.drill.common.expression.SchemaPath), FieldPath (org.ojai.FieldPath)
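
transformColumns() keeps the Drill-side column set unchanged while building the parallel OJAI FieldPath[] projection, and it tracks the MapR-DB JSON document key `_id` separately so the reader knows whether ids must be produced. A small stand-alone sketch of that root-segment check, assuming drill-common is on the classpath; the class and helper names are ours, not Drill's:

import org.apache.drill.common.expression.SchemaPath;

public class IdColumnCheckSketch {

    // MapR-DB JSON keys every document by "_id"; transformColumns() above
    // compares the root segment of each projected column against it.
    private static final String ID_KEY = "_id";

    // Hypothetical helper mirroring the check inside transformColumns().
    static boolean isIdColumn(SchemaPath column) {
        return column.getRootSegment().getPath().equalsIgnoreCase(ID_KEY);
    }

    public static void main(String[] args) {
        System.out.println(isIdColumn(SchemaPath.getSimplePath("_id")));  // true
        System.out.println(isIdColumn(SchemaPath.getSimplePath("name"))); // false
    }
}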

Example 34 with SchemaPath

Use of org.apache.drill.common.expression.SchemaPath in project drill by apache.

From class JsonConditionBuilder, method createJsonScanSpec:

private JsonScanSpec createJsonScanSpec(FunctionCall call, CompareFunctionsProcessor processor) {
    String functionName = processor.getFunctionName();
    SchemaPath field = processor.getPath();
    Value fieldValue = processor.getValue();
    QueryCondition cond = null;
    switch(functionName) {
        case "equal":
            cond = MapRDB.newCondition();
            setIsCondition(cond, field.getAsUnescapedPath(), Op.EQUAL, fieldValue);
            cond.build();
            break;
        case "not_equal":
            cond = MapRDB.newCondition();
            setIsCondition(cond, field.getAsUnescapedPath(), Op.NOT_EQUAL, fieldValue);
            cond.build();
            break;
        case "less_than":
            cond = MapRDB.newCondition();
            setIsCondition(cond, field.getAsUnescapedPath(), Op.LESS, fieldValue);
            cond.build();
            break;
        case "less_than_or_equal_to":
            cond = MapRDB.newCondition();
            setIsCondition(cond, field.getAsUnescapedPath(), Op.LESS_OR_EQUAL, fieldValue);
            cond.build();
            break;
        case "greater_than":
            cond = MapRDB.newCondition();
            setIsCondition(cond, field.getAsUnescapedPath(), Op.GREATER, fieldValue);
            cond.build();
            break;
        case "greater_than_or_equal_to":
            cond = MapRDB.newCondition();
            setIsCondition(cond, field.getAsUnescapedPath(), Op.GREATER_OR_EQUAL, fieldValue);
            cond.build();
            break;
        case "isnull":
            cond = MapRDB.newCondition().notExists(field.getAsUnescapedPath()).build();
            break;
        case "isnotnull":
            cond = MapRDB.newCondition().exists(field.getAsUnescapedPath()).build();
            break;
        case "istrue":
            cond = MapRDB.newCondition().is(field.getAsUnescapedPath(), Op.EQUAL, true).build();
            break;
        case "isnotfalse":
            cond = MapRDB.newCondition().is(field.getAsUnescapedPath(), Op.NOT_EQUAL, false).build();
            break;
        case "isfalse":
            cond = MapRDB.newCondition().is(field.getAsUnescapedPath(), Op.EQUAL, false).build();
            break;
        case "isnottrue":
            cond = MapRDB.newCondition().is(field.getAsUnescapedPath(), Op.NOT_EQUAL, true).build();
            break;
        case "like":
            cond = MapRDB.newCondition().like(field.getAsUnescapedPath(), fieldValue.getString()).build();
            break;
        default:
    }
    if (cond != null) {
        return new JsonScanSpec(groupScan.getTableName(), cond);
    }
    return null;
}
Also used: SchemaPath (org.apache.drill.common.expression.SchemaPath), Value (org.ojai.Value), QueryCondition (org.ojai.store.QueryCondition)
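
createJsonScanSpec() maps each Drill comparison function to an OJAI QueryCondition operator with one switch case per function; the six binary comparisons could also be handled table-driven. A hedged sketch of that alternative, assuming the OJAI library (org.ojai.store.QueryCondition.Op) is on the classpath; the class, map, and method names are ours, not Drill's:

import java.util.HashMap;
import java.util.Map;

import org.ojai.store.QueryCondition.Op;

public class ComparisonOpTableSketch {

    // Drill comparison function name -> OJAI condition operator, covering
    // the six binary cases from createJsonScanSpec() above.
    private static final Map<String, Op> BINARY_OPS = new HashMap<>();
    static {
        BINARY_OPS.put("equal", Op.EQUAL);
        BINARY_OPS.put("not_equal", Op.NOT_EQUAL);
        BINARY_OPS.put("less_than", Op.LESS);
        BINARY_OPS.put("less_than_or_equal_to", Op.LESS_OR_EQUAL);
        BINARY_OPS.put("greater_than", Op.GREATER);
        BINARY_OPS.put("greater_than_or_equal_to", Op.GREATER_OR_EQUAL);
    }

    // Returns null for function names that need special handling
    // (isnull, istrue, like, ...), mirroring the default case above.
    static Op lookup(String drillFunctionName) {
        return BINARY_OPS.get(drillFunctionName);
    }
}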

Example 35 with SchemaPath

Use of org.apache.drill.common.expression.SchemaPath in project drill by apache.

From class SequenceFileFormatPlugin, method getRecordReader:

@Override
public RecordReader getRecordReader(FragmentContext context, DrillFileSystem dfs, FileWork fileWork, List<SchemaPath> columns, String userName) throws ExecutionSetupException {
    final Path path = dfs.makeQualified(new Path(fileWork.getPath()));
    final FileSplit split = new FileSplit(path, fileWork.getStart(), fileWork.getLength(), new String[] { "" });
    return new SequenceFileRecordReader(split, dfs, context.getQueryUserName(), userName);
}
Also used: SchemaPath (org.apache.drill.common.expression.SchemaPath), Path (org.apache.hadoop.fs.Path), FileSplit (org.apache.hadoop.mapred.FileSplit)
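
getRecordReader() only qualifies the file path and hands the file's full byte range to the reader as a Hadoop FileSplit with an empty host list; the projected columns are accepted but not consulted here. A minimal stand-alone sketch of that Path/FileSplit construction against the local file system, assuming hadoop-common and hadoop-mapreduce-client-core are available; the file name and length are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileSplit;

public class FileSplitSketch {

    public static void main(String[] args) throws Exception {
        // Qualify a raw path against a file system, then wrap the whole byte
        // range in a mapred FileSplit, as getRecordReader() does with the
        // DrillFileSystem and the FileWork offsets.
        FileSystem fs = FileSystem.getLocal(new Configuration());
        Path path = fs.makeQualified(new Path("/tmp/example.seq")); // placeholder file
        long start = 0L;     // stand-in for fileWork.getStart()
        long length = 1024L; // stand-in for fileWork.getLength()
        FileSplit split = new FileSplit(path, start, length, new String[] { "" });
        System.out.println(split.getPath() + " [" + split.getStart() + ", " + split.getLength() + "]");
    }
}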

Aggregations

SchemaPath (org.apache.drill.common.expression.SchemaPath): 74
Test (org.junit.Test): 23
FunctionImplementationRegistry (org.apache.drill.exec.expr.fn.FunctionImplementationRegistry): 17
FragmentContext (org.apache.drill.exec.ops.FragmentContext): 16
PhysicalPlan (org.apache.drill.exec.physical.PhysicalPlan): 15
FragmentRoot (org.apache.drill.exec.physical.base.FragmentRoot): 15
PhysicalPlanReader (org.apache.drill.exec.planner.PhysicalPlanReader): 15
LogicalExpression (org.apache.drill.common.expression.LogicalExpression): 12
ExecTest (org.apache.drill.exec.ExecTest): 12
BigIntVector (org.apache.drill.exec.vector.BigIntVector): 9
ExecutionSetupException (org.apache.drill.common.exceptions.ExecutionSetupException): 8
IntVector (org.apache.drill.exec.vector.IntVector): 8
Path (org.apache.hadoop.fs.Path): 8
SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException): 7
ValueVector (org.apache.drill.exec.vector.ValueVector): 7
IOException (java.io.IOException): 6
ErrorCollector (org.apache.drill.common.expression.ErrorCollector): 6
TypedFieldId (org.apache.drill.exec.record.TypedFieldId): 6
ErrorCollectorImpl (org.apache.drill.common.expression.ErrorCollectorImpl): 5
FieldReference (org.apache.drill.common.expression.FieldReference): 5