Example 36 with MaterializedField

use of org.apache.drill.exec.record.MaterializedField in project drill by apache.

the class HyperRowSetImpl method buildReader.

/**
   * Internal method to build the set of column readers needed for
   * this row set. Used when building a row set reader.
   * @param rowIndex object that points to the current row
   * @return a row set reader that wraps one column reader per (non-map)
   * vector, in the same order as the vectors.
   */
protected RowSetReader buildReader(HyperRowIndex rowIndex) {
    FlattenedSchema accessSchema = schema().flatAccess();
    AbstractColumnReader[] readers = new AbstractColumnReader[accessSchema.count()];
    for (int i = 0; i < readers.length; i++) {
        MaterializedField field = accessSchema.column(i);
        readers[i] = ColumnAccessorFactory.newReader(field.getType());
        HyperVectorWrapper<ValueVector> hvw = getHyperVector(i);
        readers[i].bind(rowIndex, field, new HyperVectorAccessor(hvw, rowIndex));
    }
    return new RowSetReaderImpl(accessSchema, rowIndex, readers);
}
Also used : ValueVector(org.apache.drill.exec.vector.ValueVector) FlattenedSchema(org.apache.drill.test.rowSet.RowSetSchema.FlattenedSchema) MaterializedField(org.apache.drill.exec.record.MaterializedField) AbstractColumnReader(org.apache.drill.exec.vector.accessor.impl.AbstractColumnReader)
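
For orientation, a hedged sketch of how a reader built this way might be consumed in a test. The rowSet.reader(), next(), column(), and getObject() calls are assumptions about the test row-set reader API, not verbatim Drill code:

// Hypothetical usage sketch; the accessor names below are assumptions.
RowSetReader reader = rowSet.reader();
while (reader.next()) {
    // Read the first (non-map) column of the current row as a plain Java object.
    Object value = reader.column(0).getObject();
    // ... assert or print the value ...
}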

Example 37 with MaterializedField

use of org.apache.drill.exec.record.MaterializedField in project drill by apache.

the class DrillColumnMetaDataList method updateColumnMetaData.

public void updateColumnMetaData(String catalogName, String schemaName, String tableName, BatchSchema schema, List<Class<?>> getObjectClasses) {
    final List<ColumnMetaData> newColumns = new ArrayList<>(schema.getFieldCount());
    for (int colOffset = 0; colOffset < schema.getFieldCount(); colOffset++) {
        final MaterializedField field = schema.getColumn(colOffset);
        Class<?> objectClass = getObjectClasses.get(colOffset);
        final String columnName = field.getPath();
        final MajorType rpcDataType = field.getType();
        final AvaticaType bundledSqlDataType = getAvaticaType(rpcDataType);
        final String columnClassName = objectClass.getName();
        final int nullability;
        switch(field.getDataMode()) {
            case OPTIONAL:
                nullability = ResultSetMetaData.columnNullable;
                break;
            case REQUIRED:
                nullability = ResultSetMetaData.columnNoNulls;
                break;
            // Should REPEATED still map to columnNoNulls? or to columnNullable?
            case REPEATED:
                nullability = ResultSetMetaData.columnNoNulls;
                break;
            default:
                throw new AssertionError("Unexpected new DataMode value '" + field.getDataMode().name() + "'");
        }
        final boolean isSigned = Types.isJdbcSignedType(rpcDataType);
        // TODO(DRILL-3355):  TODO(DRILL-3356):  When string lengths, precisions,
        // interval kinds, etc., are available from RPC-level data, implement:
        // - precision for ResultSetMetadata.getPrecision(...) (like
        //   getColumns()'s COLUMN_SIZE)
        // - scale for getScale(...), and
        // - displaySize for getColumnDisplaySize(...).
        final int precision = Types.getPrecision(rpcDataType);
        final int scale = Types.getScale(rpcDataType);
        final int displaySize = Types.getJdbcDisplaySize(rpcDataType);
        ColumnMetaData col = new ColumnMetaData(
            colOffset, // (zero-based ordinal (for Java arrays/lists).)
            false, /* autoIncrement */
            false, /* caseSensitive */
            true, /* searchable */
            false, /* currency */
            nullability,
            isSigned,
            displaySize,
            columnName, /* label */
            columnName, /* columnName */
            schemaName,
            precision,
            scale,
            tableName,
            catalogName,
            bundledSqlDataType,
            true, /* readOnly */
            false, /* writable */
            false, /* definitelyWritable */
            columnClassName);
        newColumns.add(col);
    }
    columns = newColumns;
}
Also used : MajorType(org.apache.drill.common.types.TypeProtos.MajorType) AvaticaType(org.apache.calcite.avatica.ColumnMetaData.AvaticaType) ArrayList(java.util.ArrayList) MaterializedField(org.apache.drill.exec.record.MaterializedField) ColumnMetaData(org.apache.calcite.avatica.ColumnMetaData)
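
The switch above is the complete mapping from Drill's DataMode to the JDBC nullability constants. The same mapping, pulled out into a standalone helper for illustration (a sketch, not part of the Drill sources; the class and method names are invented here):

import java.sql.ResultSetMetaData;

import org.apache.drill.common.types.TypeProtos.DataMode;

// Illustrative helper (not in the Drill sources): maps a Drill DataMode to the
// java.sql.ResultSetMetaData nullability constants, exactly as the switch above does.
public class NullabilityMapper {

    public static int toJdbcNullability(DataMode mode) {
        switch (mode) {
            case OPTIONAL:
                return ResultSetMetaData.columnNullable;
            case REQUIRED:
                return ResultSetMetaData.columnNoNulls;
            case REPEATED:
                // Matches the source's current choice; the source itself notes
                // that this mapping is an open question.
                return ResultSetMetaData.columnNoNulls;
            default:
                throw new AssertionError("Unexpected DataMode value '" + mode.name() + "'");
        }
    }
}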

Example 38 with MaterializedField

use of org.apache.drill.exec.record.MaterializedField in project drill by apache.

the class SchemaBuilder method add.

public SchemaBuilder add(String pathName, MajorType type) {
    MaterializedField col = MaterializedField.create(pathName, type);
    columns.add(col);
    return this;
}
Also used : MaterializedField(org.apache.drill.exec.record.MaterializedField)
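
For context, a hedged sketch of the column definitions such a builder collects. MaterializedField.create and the Types/MinorType helpers are the same ones used in the examples above; the column names and the main method are invented for illustration, and the terminal build call of SchemaBuilder is omitted because its exact name varies between Drill versions:

import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.record.MaterializedField;

// Sketch (not from the Drill sources): building column definitions the same way
// SchemaBuilder.add does, via MaterializedField.create(name, type).
public class SchemaSketch {

    public static void main(String[] args) {
        MajorType requiredInt = Types.required(MinorType.INT);          // non-nullable INT
        MajorType optionalVarchar = Types.optional(MinorType.VARCHAR);  // nullable VARCHAR

        MaterializedField id = MaterializedField.create("id", requiredInt);
        MaterializedField name = MaterializedField.create("name", optionalVarchar);

        // A SchemaBuilder chain would collect the same columns, e.g.
        // new SchemaBuilder().add("id", requiredInt).add("name", optionalVarchar)...
        System.out.println(id + ", " + name);
    }
}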

Example 39 with MaterializedField

use of org.apache.drill.exec.record.MaterializedField in project drill by apache.

the class MapUtility method writeToMapFromReader.

/*
   * Function to read a value from the field reader, detect the type, construct the appropriate value holder
   * and use the value holder to write to the Map.
   */
// TODO : This should be templatized and generated using freemarker
public static void writeToMapFromReader(FieldReader fieldReader, BaseWriter.MapWriter mapWriter) {
    try {
        MajorType valueMajorType = fieldReader.getType();
        MinorType valueMinorType = valueMajorType.getMinorType();
        boolean repeated = false;
        if (valueMajorType.getMode() == TypeProtos.DataMode.REPEATED) {
            repeated = true;
        }
        switch(valueMinorType) {
            case TINYINT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).tinyInt());
                } else {
                    fieldReader.copyAsValue(mapWriter.tinyInt(MappifyUtility.fieldValue));
                }
                break;
            case SMALLINT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).smallInt());
                } else {
                    fieldReader.copyAsValue(mapWriter.smallInt(MappifyUtility.fieldValue));
                }
                break;
            case BIGINT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).bigInt());
                } else {
                    fieldReader.copyAsValue(mapWriter.bigInt(MappifyUtility.fieldValue));
                }
                break;
            case INT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).integer());
                } else {
                    fieldReader.copyAsValue(mapWriter.integer(MappifyUtility.fieldValue));
                }
                break;
            case UINT1:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt1());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt1(MappifyUtility.fieldValue));
                }
                break;
            case UINT2:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt2());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt2(MappifyUtility.fieldValue));
                }
                break;
            case UINT4:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt4());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt4(MappifyUtility.fieldValue));
                }
                break;
            case UINT8:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt8());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt8(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL9:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal9());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal9(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL18:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal18());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal18(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL28SPARSE:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal28Sparse());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal28Sparse(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL38SPARSE:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal38Sparse());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal38Sparse(MappifyUtility.fieldValue));
                }
                break;
            case DATE:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).date());
                } else {
                    fieldReader.copyAsValue(mapWriter.date(MappifyUtility.fieldValue));
                }
                break;
            case TIME:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).time());
                } else {
                    fieldReader.copyAsValue(mapWriter.time(MappifyUtility.fieldValue));
                }
                break;
            case TIMESTAMP:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).timeStamp());
                } else {
                    fieldReader.copyAsValue(mapWriter.timeStamp(MappifyUtility.fieldValue));
                }
                break;
            case INTERVAL:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).interval());
                } else {
                    fieldReader.copyAsValue(mapWriter.interval(MappifyUtility.fieldValue));
                }
                break;
            case INTERVALDAY:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).intervalDay());
                } else {
                    fieldReader.copyAsValue(mapWriter.intervalDay(MappifyUtility.fieldValue));
                }
                break;
            case INTERVALYEAR:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).intervalYear());
                } else {
                    fieldReader.copyAsValue(mapWriter.intervalYear(MappifyUtility.fieldValue));
                }
                break;
            case FLOAT4:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).float4());
                } else {
                    fieldReader.copyAsValue(mapWriter.float4(MappifyUtility.fieldValue));
                }
                break;
            case FLOAT8:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).float8());
                } else {
                    fieldReader.copyAsValue(mapWriter.float8(MappifyUtility.fieldValue));
                }
                break;
            case BIT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).bit());
                } else {
                    fieldReader.copyAsValue(mapWriter.bit(MappifyUtility.fieldValue));
                }
                break;
            case VARCHAR:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).varChar());
                } else {
                    fieldReader.copyAsValue(mapWriter.varChar(MappifyUtility.fieldValue));
                }
                break;
            case VARBINARY:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).varBinary());
                } else {
                    fieldReader.copyAsValue(mapWriter.varBinary(MappifyUtility.fieldValue));
                }
                break;
            case MAP:
                if (valueMajorType.getMode() == TypeProtos.DataMode.REPEATED) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).map());
                } else {
                    fieldReader.copyAsValue(mapWriter.map(MappifyUtility.fieldValue));
                }
                break;
            case LIST:
                fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).list());
                break;
            default:
                throw new DrillRuntimeException(String.format("kvgen does not support input of type: %s", valueMinorType));
        }
    } catch (ClassCastException e) {
        final MaterializedField field = fieldReader.getField();
        throw new DrillRuntimeException(String.format(TYPE_MISMATCH_ERROR, field.getPath(), field.getType()));
    }
}
Also used : MajorType(org.apache.drill.common.types.TypeProtos.MajorType) MinorType(org.apache.drill.common.types.TypeProtos.MinorType) MaterializedField(org.apache.drill.exec.record.MaterializedField) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException)
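
Every case above follows the same two-branch pattern: a repeated value is written through the list writer obtained from mapWriter.list(name), a scalar value through the named field writer. Condensed here to one representative type (VARCHAR), using only calls that already appear in the method; the wrapper class and the import path assumed for MappifyUtility are illustrative:

import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.exec.expr.fn.impl.MappifyUtility;
import org.apache.drill.exec.vector.complex.reader.FieldReader;
import org.apache.drill.exec.vector.complex.writer.BaseWriter;

// Condensed restatement of the dispatch pattern above, for VARCHAR only.
public class MapWriteSketch {

    static void writeVarChar(FieldReader fieldReader, BaseWriter.MapWriter mapWriter) {
        boolean repeated = fieldReader.getType().getMode() == TypeProtos.DataMode.REPEATED;
        if (repeated) {
            // Repeated value: copy into the list writer for the "value" field.
            fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).varChar());
        } else {
            // Scalar value: copy into the named scalar writer.
            fieldReader.copyAsValue(mapWriter.varChar(MappifyUtility.fieldValue));
        }
    }
}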

Example 40 with MaterializedField

use of org.apache.drill.exec.record.MaterializedField in project drill by apache.

the class ProjectRecordBatch method setupNewSchema.

@Override
protected boolean setupNewSchema() throws SchemaChangeException {
    if (allocationVectors != null) {
        for (final ValueVector v : allocationVectors) {
            v.clear();
        }
    }
    this.allocationVectors = Lists.newArrayList();
    if (complexWriters != null) {
        container.clear();
    } else {
        container.zeroVectors();
    }
    final List<NamedExpression> exprs = getExpressionList();
    final ErrorCollector collector = new ErrorCollectorImpl();
    final List<TransferPair> transfers = Lists.newArrayList();
    final ClassGenerator<Projector> cg = CodeGenerator.getRoot(Projector.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
    cg.getCodeGenerator().plainJavaCapable(true);
    // Uncomment this line to debug the generated code.
    //    cg.getCodeGenerator().saveCodeForDebugging(true);
    final IntHashSet transferFieldIds = new IntHashSet();
    final boolean isAnyWildcard = isAnyWildcard(exprs);
    final ClassifierResult result = new ClassifierResult();
    final boolean classify = isClassificationNeeded(exprs);
    for (int i = 0; i < exprs.size(); i++) {
        final NamedExpression namedExpression = exprs.get(i);
        result.clear();
        if (classify && namedExpression.getExpr() instanceof SchemaPath) {
            classifyExpr(namedExpression, incoming, result);
            if (result.isStar) {
                // The value indicates which wildcard we are processing now
                final Integer value = result.prefixMap.get(result.prefix);
                if (value != null && value.intValue() == 1) {
                    int k = 0;
                    for (final VectorWrapper<?> wrapper : incoming) {
                        final ValueVector vvIn = wrapper.getValueVector();
                        if (k > result.outputNames.size() - 1) {
                            assert false;
                        }
                        // get the renamed column names
                        final String name = result.outputNames.get(k++);
                        if (name == EMPTY_STRING) {
                            continue;
                        }
                        if (isImplicitFileColumn(vvIn)) {
                            continue;
                        }
                        final FieldReference ref = new FieldReference(name);
                        final ValueVector vvOut = container.addOrGet(MaterializedField.create(ref.getAsNamePart().getName(), vvIn.getField().getType()), callBack);
                        final TransferPair tp = vvIn.makeTransferPair(vvOut);
                        transfers.add(tp);
                    }
                } else if (value != null && value.intValue() > 1) {
                    // subsequent wildcards should do a copy of incoming valuevectors
                    int k = 0;
                    for (final VectorWrapper<?> wrapper : incoming) {
                        final ValueVector vvIn = wrapper.getValueVector();
                        final SchemaPath originalPath = SchemaPath.getSimplePath(vvIn.getField().getPath());
                        if (k > result.outputNames.size() - 1) {
                            assert false;
                        }
                        // get the renamed column names
                        final String name = result.outputNames.get(k++);
                        if (name == EMPTY_STRING) {
                            continue;
                        }
                        if (isImplicitFileColumn(vvIn)) {
                            continue;
                        }
                        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(originalPath, incoming, collector, context.getFunctionRegistry());
                        if (collector.hasErrors()) {
                            throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
                        }
                        final MaterializedField outputField = MaterializedField.create(name, expr.getMajorType());
                        final ValueVector vv = container.addOrGet(outputField, callBack);
                        allocationVectors.add(vv);
                        final TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
                        final ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
                        final HoldingContainer hc = cg.addExpr(write, ClassGenerator.BlkCreateMode.TRUE_IF_BOUND);
                    }
                }
                continue;
            }
        } else {
            // For the columns which do not need to be classified,
            // it is still necessary to ensure the output column name is unique
            result.outputNames = Lists.newArrayList();
            final String outputName = getRef(namedExpression).getRootSegment().getPath();
            addToResultMaps(outputName, result, true);
        }
        String outputName = getRef(namedExpression).getRootSegment().getPath();
        if (result != null && result.outputNames != null && result.outputNames.size() > 0) {
            boolean isMatched = false;
            for (int j = 0; j < result.outputNames.size(); j++) {
                if (!result.outputNames.get(j).equals(EMPTY_STRING)) {
                    outputName = result.outputNames.get(j);
                    isMatched = true;
                    break;
                }
            }
            if (!isMatched) {
                continue;
            }
        }
        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(namedExpression.getExpr(), incoming, collector, context.getFunctionRegistry(), true, unionTypeEnabled);
        final MaterializedField outputField = MaterializedField.create(outputName, expr.getMajorType());
        if (collector.hasErrors()) {
            throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
        }
        // add value vector to transfer if direct reference and this is allowed, otherwise, add to evaluation stack.
        if (expr instanceof ValueVectorReadExpression && incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.NONE && !((ValueVectorReadExpression) expr).hasReadPath() && !isAnyWildcard && !transferFieldIds.contains(((ValueVectorReadExpression) expr).getFieldId().getFieldIds()[0])) {
            final ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) expr;
            final TypedFieldId id = vectorRead.getFieldId();
            final ValueVector vvIn = incoming.getValueAccessorById(id.getIntermediateClass(), id.getFieldIds()).getValueVector();
            Preconditions.checkNotNull(incoming);
            final FieldReference ref = getRef(namedExpression);
            final ValueVector vvOut = container.addOrGet(MaterializedField.create(ref.getAsUnescapedPath(), vectorRead.getMajorType()), callBack);
            final TransferPair tp = vvIn.makeTransferPair(vvOut);
            transfers.add(tp);
            transferFieldIds.add(vectorRead.getFieldId().getFieldIds()[0]);
        } else if (expr instanceof DrillFuncHolderExpr && ((DrillFuncHolderExpr) expr).getHolder().isComplexWriterFuncHolder()) {
            // Lazy initialization of the list of complex writers, if not done yet.
            if (complexWriters == null) {
                complexWriters = Lists.newArrayList();
            } else {
                complexWriters.clear();
            }
            // The reference name will be passed to ComplexWriter, used as the name of the output vector from the writer.
            ((DrillFuncHolderExpr) expr).getFieldReference(namedExpression.getRef());
            cg.addExpr(expr, ClassGenerator.BlkCreateMode.TRUE_IF_BOUND);
            if (complexFieldReferencesList == null) {
                complexFieldReferencesList = Lists.newArrayList();
            }
            // save the field reference for later for getting schema when input is empty
            complexFieldReferencesList.add(namedExpression.getRef());
        } else {
            // need to do evaluation.
            final ValueVector vector = container.addOrGet(outputField, callBack);
            allocationVectors.add(vector);
            final TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
            final boolean useSetSafe = !(vector instanceof FixedWidthVector);
            final ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, useSetSafe);
            final HoldingContainer hc = cg.addExpr(write, ClassGenerator.BlkCreateMode.TRUE_IF_BOUND);
            // We cannot do multiple transfers from the same vector. However we still need to instantiate the output vector.
            if (expr instanceof ValueVectorReadExpression) {
                final ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) expr;
                if (!vectorRead.hasReadPath()) {
                    final TypedFieldId id = vectorRead.getFieldId();
                    final ValueVector vvIn = incoming.getValueAccessorById(id.getIntermediateClass(), id.getFieldIds()).getValueVector();
                    vvIn.makeTransferPair(vector);
                }
            }
            logger.debug("Added eval for project expression.");
        }
    }
    try {
        CodeGenerator<Projector> codeGen = cg.getCodeGenerator();
        codeGen.plainJavaCapable(true);
        // Uncomment this line to debug the generated code.
        //      codeGen.saveCodeForDebugging(true);
        this.projector = context.getImplementationClass(codeGen);
        projector.setup(context, incoming, this, transfers);
    } catch (ClassTransformationException | IOException e) {
        throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }
    if (container.isSchemaChanged()) {
        container.buildSchema(SelectionVectorMode.NONE);
        return true;
    } else {
        return false;
    }
}
Also used : TransferPair(org.apache.drill.exec.record.TransferPair) IntHashSet(com.carrotsearch.hppc.IntHashSet) ErrorCollector(org.apache.drill.common.expression.ErrorCollector) DrillFuncHolderExpr(org.apache.drill.exec.expr.DrillFuncHolderExpr) ErrorCollectorImpl(org.apache.drill.common.expression.ErrorCollectorImpl) LogicalExpression(org.apache.drill.common.expression.LogicalExpression) HoldingContainer(org.apache.drill.exec.expr.ClassGenerator.HoldingContainer) SchemaPath(org.apache.drill.common.expression.SchemaPath) TypedFieldId(org.apache.drill.exec.record.TypedFieldId) ValueVectorWriteExpression(org.apache.drill.exec.expr.ValueVectorWriteExpression) FieldReference(org.apache.drill.common.expression.FieldReference) ClassTransformationException(org.apache.drill.exec.exception.ClassTransformationException) FixedWidthVector(org.apache.drill.exec.vector.FixedWidthVector) VectorWrapper(org.apache.drill.exec.record.VectorWrapper) MaterializedField(org.apache.drill.exec.record.MaterializedField) IOException(java.io.IOException) ValueVector(org.apache.drill.exec.vector.ValueVector) ValueVectorReadExpression(org.apache.drill.exec.expr.ValueVectorReadExpression) SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) NamedExpression(org.apache.drill.common.logical.data.NamedExpression)
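
In both the transfer branch and the evaluation branch above, the MaterializedField for an output column is built the same way: the name comes from the projection, the type from the materialized expression. A minimal restatement of that step (a sketch, not additional source code; it assumes the single-argument addOrGet overload, whereas the method above also passes a schema-change callback):

import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.vector.ValueVector;

// Sketch: derive the output vector for one projected expression.
public class OutputFieldSketch {

    static ValueVector outputVectorFor(String outputName, LogicalExpression expr,
                                       VectorContainer container) {
        // Name from the projection, type from the materialized expression.
        MaterializedField outputField = MaterializedField.create(outputName, expr.getMajorType());
        // addOrGet creates the vector on first use and reuses it on later schema setups.
        return container.addOrGet(outputField);
    }
}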
